How do I receive stream callbacks and video data?

A Stream can deliver events to your application about the stream data that is received as well as stream status messages.

See the IStreamListener class documentation for a description of the events.

Prerequisite: This example requires Server and Encoder objects

For complete examples refer to the sample applications that come with the DecoderSDK.

Example

Register for the stream listener and open the media stream

// Get the media stream from the encoder
MediaStream mediaStream = enc.GetMediaStream();
// Check it is valid
if (mediaStream.IsValid())
{
    // register to receive IStreamListener callbacks - including video data!
    mediaStream.RegisterListener(this);
    // open the MediaStream to start receiving frames
    mediaStream.Open();
    // allow this thread to sleep for a while to receive frames;
    // frames will be provided via the DataReceived callback meanwhile
    Sleep(30000);
    // close the MediaStream to stop receiving frames
    mediaStream.Close();
}

Retrieve stream data from the callback

// IStreamListener callbacks
// Called by the stream with each item of received data; video frames are
// delivered with TypeName() == L"YCbCrFrame".
void DataReceived(StreamData data)
{
    if (data.IsValid())
    {
        std::wstring typeName = data.TypeName();
        // Check what type of data - is it a video frame?
        if (!typeName.compare(L"YCbCrFrame"))
        {
            YCbCrFrame frame = YCbCrFrame(data);
            StreamTimestampContext stc = frame.GetTimeStampContext();
            int input = frame.GetInput();
            switch (stc)
            {
            default:
            case STC_Live:
                wcout << L" Got live frame - input " << to_wstring(input) << endl;
                break;
            // BUG FIX: the archive branch had no case label, making it
            // unreachable dead code after the preceding break
            case STC_Archive:
                wcout << L" Got archive frame - input " << to_wstring(input) << endl;
                break;
            }
        }
    }
}

Register for the stream listener and open the media stream

// Find the media stream on the encoder
MediaStream mediaStream = _encoder.GetMediaStream();
if (mediaStream.IsValid())
{
// register to receive IStreamListener callbacks - including video data!
mediaStream.RegisterListener(this);
Console.WriteLine("Opening MediaStream...");
// open the MediaStream to start receiving frames
OpResult result = mediaStream.Open();
// Open() reports success/failure via an OpResult rather than throwing
if (result != OpResult.OR_Success)
{
Console.WriteLine("Failed to open MediaStream...");
return;
}
Console.WriteLine("MediaStream open");
// wait here while we're running...
// frames will be provided via the DataReceived callback
// NOTE(review): _isRunning is presumably cleared elsewhere on shutdown — confirm
while (_isRunning)
{
Thread.Sleep(1);
}
// closing the stream stops frame delivery
mediaStream.Close();
Console.WriteLine("MediaStream closed");
}

Retrieve stream data from the callback

/// <summary>
/// Processes one StreamData item delivered by the IStreamListener callback,
/// dispatching on its type name: "YCbCrFrame" (video), "AudioData",
/// "GPSData" or "VPTZData".
/// </summary>
/// <param name="data">The stream data received from the MediaStream.</param>
private void AsyncProcessReceivedData(StreamData data)
{
    if (data.IsValid())
    {
        string typeName = data.TypeName();
        switch (typeName)
        {
            case "YCbCrFrame":
            {
                // Braces scope the locals to this case (they previously
                // leaked into the whole switch declaration space).
                // The MediaStream has received a video frame
                // Video frames are provided as planar YUV frames
                YCbCrFrame frame = new YCbCrFrame(data);
                StreamTimestampContext stc = frame.GetTimeStampContext();
                int input = frame.GetInput();
                switch (stc)
                {
                    case StreamTimestampContext.STC_Live:
                        // In this case, the frame timestamp will be the time the frame was encoded at
                        Console.WriteLine(" Got live frame ({0}) - input index {1}", frame.GetTimeStamp().DateTimeFromEpocMilliseconds(), input);
                        break;
                    case StreamTimestampContext.STC_Archive:
                        // In this case, the frame timestamp will be the archive footage timestamp
                        Console.WriteLine(" Got Archive frame ({0}) - input index {1}", frame.GetTimeStamp().DateTimeFromEpocMilliseconds(), input);
                        break;
                }
                // Convert received frame to JPEG
                // 85% quality, correct aspect ratio
                // NOTE(review): the Java sample constructs JPEGFrame with no
                // arguments before Convert() — confirm whether passing the raw
                // StreamData to the constructor is intended here.
                JPEGFrame jpgFrame = new JPEGFrame(data);
                if (jpgFrame.Convert(frame, 85, true))
                {
                    // jpgFrame now contains the JPEG image
                }
                else
                {
                    // JPEG conversion failed....
                }
                break;
            }
            case "AudioData":
                // The MediaStream has received audio data
                // Audio data is provided with a configuration to describe the data
                break;
            case "GPSData":
                // The MediaStream has received GPS data
                // GPS data consists of comma separated latitude and longitude values
                break;
            case "VPTZData":
                // The MediaStream has received Virtual PTZ data
                // Virtual PTZ data provides information on the position and size of the
                // received frame with respect to the captured frame at the encoder
                // This allows a client application to display an overlay indicating the region
                // of the source frame that is currently being viewed
                break;
        }
    }
}

Register for the stream listener and open the media stream

// Find the media stream on the encoder
MediaStream mediaStream = encoder.GetMediaStream();
if (mediaStream.IsValid())
{
    // register to receive IStreamListener callbacks - including video data!
    mediaStream.RegisterListener(new SDKStreamListener());
    Log.i(TAG, "Opening MediaStream...");
    // open the MediaStream to start receiving frames
    OpResult result = mediaStream.Open();
    if (result != OR_Success)
    {
        Log.w(TAG, "Failed to open MediaStream...");
        return;
    }
    Log.i(TAG, "MediaStream open");
    // wait here while we're running...
    // frames will be provided via the DataReceived callback
    try {
        Thread.sleep(30000);
    } catch (InterruptedException e) {
        // restore the interrupt flag so callers can still observe it
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
    // close the media stream - this will stop decoding frames
    mediaStream.Close();
    Log.i(TAG, "MediaStream closed");
}

Retrieve stream data from the callback

private class SDKStreamListener extends IStreamListener {
@Override
public void DataReceived(final StreamData data) {
    // The stream that this object is the listener for has received data from the stream.
    // Get on to another thread from the callback here - doing too much processing on
    // the callback thread may delay the decode
    // TODO: a single thread could be used to process this data - set it up and post the
    // StreamData object to it for processing
    Log.i(TAG, "Streaming DataReceived");
    new Thread(new Runnable() {
        @Override
        public void run() {
            if (data != null && data.IsValid()) {
                String typeName = data.TypeName();
                if (typeName.equals("YCbCrFrame")) {
                    // The MediaStream has received a video frame
                    // Video frames are provided as planar YUV frames
                    YCbCrFrame frame = new YCbCrFrame(data);
                    StreamTimestampContext stc = frame.GetTimeStampContext();
                    int input = frame.GetInput();
                    Log.v(TAG, " got " + stc.toString());
                    // Compare enum constants directly with == rather than via
                    // toString() round-trips (cheaper and refactor-safe)
                    if (stc == STC_Live) {
                        Log.i(TAG, " Got live frame " + convertTimestampToUTCDateString(frame.GetTimeStamp()) + " (" + frame.GetTimeStamp() + ") - input index " + input);
                    }
                    else if (stc == STC_Archive) {
                        Log.i(TAG, " Got archive frame " + convertTimestampToUTCDateString(frame.GetTimeStamp()) + " (" + frame.GetTimeStamp() + ") - input index " + input);
                    }
                    // Convert received frame to JPEG
                    // 85% quality, correct aspect ratio
                    JPEGFrame jpgFrame = new JPEGFrame();
                    if (jpgFrame.Convert(frame, 85, true))
                    {
                        // jpgFrame now contains the JPEG image
                    }
                    else
                    {
                        // JPEG conversion failed....
                    }
                }
                else if (typeName.equals("AudioData"))
                {
                    // The MediaStream has received audio data
                    // Audio data is provided with a configuration to describe the data
                }
                else if (typeName.equals("GPSData"))
                {
                    // The MediaStream has received GPS data
                    // GPS data consists of comma separated latitude and longitude values
                }
                else if (typeName.equals("VPTZData"))
                {
                    // The MediaStream has received Virtual PTZ data
                    // Virtual PTZ data provides information on the position and size of the
                    // received frame with respect to the captured frame at the encoder
                    // This allows a client application to display an overlay indicating the region
                    // of the source frame that is currently being viewed
                }
            }
        }
    }).start();
}