--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/content/media/gstreamer/GStreamerReader-0.10.cpp	Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,201 @@
+#include "nsError.h"
+#include "MediaDecoderStateMachine.h"
+#include "AbstractMediaDecoder.h"
+#include "MediaResource.h"
+#include "GStreamerReader.h"
+#include "GStreamerMozVideoBuffer.h"
+#include "GStreamerFormatHelper.h"
+#include "VideoUtils.h"
+#include "mozilla/dom/TimeRanges.h"
+#include "mozilla/Endian.h"
+#include "mozilla/Preferences.h"
+
+using namespace mozilla;
+using mozilla::layers::PlanarYCbCrImage;
+using mozilla::layers::ImageContainer;
+
+GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad,
+                                                     guint64 aOffset,
+                                                     guint aSize,
+                                                     GstCaps* aCaps,
+                                                     GstBuffer** aBuf)
+{
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad));
+  return reader->AllocateVideoBuffer(aPad, aOffset, aSize, aCaps, aBuf);
+}
+
+GstFlowReturn GStreamerReader::AllocateVideoBuffer(GstPad* aPad,
+                                                   guint64 aOffset,
+                                                   guint aSize,
+                                                   GstCaps* aCaps,
+                                                   GstBuffer** aBuf)
+{
+  nsRefPtr<PlanarYCbCrImage> image;
+  return AllocateVideoBufferFull(aPad, aOffset, aSize, aCaps, aBuf, image);
+}
+
+GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
+                                                       guint64 aOffset,
+                                                       guint aSize,
+                                                       GstCaps* aCaps,
+                                                       GstBuffer** aBuf,
+                                                       nsRefPtr<PlanarYCbCrImage>& aImage)
+{
+  /* allocate an image using the container */
+  ImageContainer* container = mDecoder->GetImageContainer();
+  if (container == nullptr) {
+    return GST_FLOW_ERROR;
+  }
+  nsRefPtr<PlanarYCbCrImage> image =
+    container->CreateImage(ImageFormat::PLANAR_YCBCR).downcast<PlanarYCbCrImage>();
+
+  /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
+  GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new());
+  GST_BUFFER_SIZE(buf) = aSize;
+  /* allocate the actual YUV buffer */
+  GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);
+
+  aImage = image;
+
+  /* create a GstMozVideoBufferData to hold the image */
+  GstMozVideoBufferData* bufferdata = new GstMozVideoBufferData(image);
+
+  /* Attach bufferdata to our GstMozVideoBuffer; it will take care of freeing it */
+  gst_moz_video_buffer_set_data(GST_MOZ_VIDEO_BUFFER(buf), bufferdata);
+
+  *aBuf = buf;
+  return GST_FLOW_OK;
+}
+
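+/* Event probe installed on the audio and video sink pads by
+ * InstallPadCallbacks(). NEWSEGMENT events are recorded in the per-stream
+ * GstSegment (under mGstThreadsMonitor) so buffer timestamps can later be
+ * converted to stream time; FLUSH_STOP resets decoding state after a seek.
+ * Returning TRUE lets the event continue flowing. */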
+gboolean GStreamerReader::EventProbe(GstPad* aPad, GstEvent* aEvent)
+{
+  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));
+  switch(GST_EVENT_TYPE(aEvent)) {
+    case GST_EVENT_NEWSEGMENT:
+    {
+      gboolean update;
+      gdouble rate;
+      GstFormat format;
+      gint64 start, stop, position;
+      GstSegment* segment;
+
+      /* Store the segments so we can convert timestamps to stream time, which
+       * is what the upper layers sync on.
+       */
+      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+      gst_event_parse_new_segment(aEvent, &update, &rate, &format,
+                                  &start, &stop, &position);
+      if (parent == GST_ELEMENT(mVideoAppSink))
+        segment = &mVideoSegment;
+      else
+        segment = &mAudioSegment;
+      gst_segment_set_newsegment(segment, update, rate, format,
+                                 start, stop, position);
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+      /* Reset on seeks */
+      ResetDecode();
+      break;
+    default:
+      break;
+  }
+  gst_object_unref(parent);
+
+  return TRUE;
+}
+
+gboolean GStreamerReader::EventProbeCb(GstPad* aPad,
+                                       GstEvent* aEvent,
+                                       gpointer aUserData)
+{
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
+  return reader->EventProbe(aPad, aEvent);
+}
+
+nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
+{
+  if (!GST_IS_MOZ_VIDEO_BUFFER (aBuffer))
+    return nullptr;
+
+  nsRefPtr<PlanarYCbCrImage> image;
+  GstMozVideoBufferData* bufferdata = reinterpret_cast<GstMozVideoBufferData*>(gst_moz_video_buffer_get_data(GST_MOZ_VIDEO_BUFFER(aBuffer)));
+  image = bufferdata->mImage;
+
+  PlanarYCbCrImage::Data data;
+  data.mPicX = data.mPicY = 0;
+  data.mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
+  data.mStereoMode = StereoMode::MONO;
+
+  data.mYChannel = GST_BUFFER_DATA(aBuffer);
+  data.mYStride = gst_video_format_get_row_stride(mFormat, 0, mPicture.width);
+  data.mYSize = gfx::IntSize(data.mYStride,
+                             gst_video_format_get_component_height(mFormat, 0, mPicture.height));
+  data.mYSkip = 0;
+  data.mCbCrStride = gst_video_format_get_row_stride(mFormat, 1, mPicture.width);
+  data.mCbCrSize = gfx::IntSize(data.mCbCrStride,
+                                gst_video_format_get_component_height(mFormat, 1, mPicture.height));
+  data.mCbChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 1,
+                                                                           mPicture.width, mPicture.height);
+  data.mCrChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 2,
+                                                                           mPicture.width, mPicture.height);
+  data.mCbSkip = 0;
+  data.mCrSkip = 0;
+
+  image->SetDataNoCopy(data);
+
+  return image;
+}
+
+void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
+                                          GstBuffer** aOutBuffer,
+                                          nsRefPtr<PlanarYCbCrImage> &aImage)
+{
+  AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(aBuffer),
+                          GST_BUFFER_SIZE(aBuffer), nullptr, aOutBuffer, aImage);
+
+  gst_buffer_copy_metadata(*aOutBuffer, aBuffer, (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
+  memcpy(GST_BUFFER_DATA(*aOutBuffer), GST_BUFFER_DATA(aBuffer), GST_BUFFER_SIZE(*aOutBuffer));
+
+  aImage = GetImageFromBuffer(*aOutBuffer);
+}
+
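+/* Caps advertised on the audio app sink: mono or stereo raw audio in the
+ * build's native sample format (32-bit float when MOZ_SAMPLE_TYPE_FLOAT32
+ * is defined, 16-bit integer otherwise) and host byte order; 1234 and 4321
+ * are the GStreamer 0.10 caps values for little and big endianness. */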
1.183 + "width", G_TYPE_INT, width, 1.184 + "endianness", G_TYPE_INT, endianness, 1.185 + NULL); 1.186 + 1.187 + return caps; 1.188 +} 1.189 + 1.190 +void GStreamerReader::InstallPadCallbacks() 1.191 +{ 1.192 + GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink"); 1.193 + gst_pad_add_event_probe(sinkpad, 1.194 + G_CALLBACK(&GStreamerReader::EventProbeCb), this); 1.195 + 1.196 + gst_pad_set_bufferalloc_function(sinkpad, GStreamerReader::AllocateVideoBufferCb); 1.197 + gst_pad_set_element_private(sinkpad, this); 1.198 + gst_object_unref(sinkpad); 1.199 + 1.200 + sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink"); 1.201 + gst_pad_add_event_probe(sinkpad, 1.202 + G_CALLBACK(&GStreamerReader::EventProbeCb), this); 1.203 + gst_object_unref(sinkpad); 1.204 +}