michael@0: #include "nsError.h" michael@0: #include "MediaDecoderStateMachine.h" michael@0: #include "AbstractMediaDecoder.h" michael@0: #include "MediaResource.h" michael@0: #include "GStreamerReader.h" michael@0: #include "GStreamerMozVideoBuffer.h" michael@0: #include "GStreamerFormatHelper.h" michael@0: #include "VideoUtils.h" michael@0: #include "mozilla/dom/TimeRanges.h" michael@0: #include "mozilla/Endian.h" michael@0: #include "mozilla/Preferences.h" michael@0: michael@0: using namespace mozilla; michael@0: using mozilla::layers::PlanarYCbCrImage; michael@0: using mozilla::layers::ImageContainer; michael@0: michael@0: GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad, michael@0: guint64 aOffset, michael@0: guint aSize, michael@0: GstCaps* aCaps, michael@0: GstBuffer** aBuf) michael@0: { michael@0: GStreamerReader* reader = reinterpret_cast(gst_pad_get_element_private(aPad)); michael@0: return reader->AllocateVideoBuffer(aPad, aOffset, aSize, aCaps, aBuf); michael@0: } michael@0: michael@0: GstFlowReturn GStreamerReader::AllocateVideoBuffer(GstPad* aPad, michael@0: guint64 aOffset, michael@0: guint aSize, michael@0: GstCaps* aCaps, michael@0: GstBuffer** aBuf) michael@0: { michael@0: nsRefPtr image; michael@0: return AllocateVideoBufferFull(aPad, aOffset, aSize, aCaps, aBuf, image); michael@0: } michael@0: michael@0: GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad, michael@0: guint64 aOffset, michael@0: guint aSize, michael@0: GstCaps* aCaps, michael@0: GstBuffer** aBuf, michael@0: nsRefPtr& aImage) michael@0: { michael@0: /* allocate an image using the container */ michael@0: ImageContainer* container = mDecoder->GetImageContainer(); michael@0: if (container == nullptr) { michael@0: return GST_FLOW_ERROR; michael@0: } michael@0: nsRefPtr image = michael@0: container->CreateImage(ImageFormat::PLANAR_YCBCR).downcast(); michael@0: michael@0: /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */ michael@0: GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new()); michael@0: GST_BUFFER_SIZE(buf) = aSize; michael@0: /* allocate the actual YUV buffer */ michael@0: GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize); michael@0: michael@0: aImage = image; michael@0: michael@0: /* create a GstMozVideoBufferData to hold the image */ michael@0: GstMozVideoBufferData* bufferdata = new GstMozVideoBufferData(image); michael@0: michael@0: /* Attach bufferdata to our GstMozVideoBuffer, it will take care to free it */ michael@0: gst_moz_video_buffer_set_data(GST_MOZ_VIDEO_BUFFER(buf), bufferdata); michael@0: michael@0: *aBuf = buf; michael@0: return GST_FLOW_OK; michael@0: } michael@0: michael@0: gboolean GStreamerReader::EventProbe(GstPad* aPad, GstEvent* aEvent) michael@0: { michael@0: GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad)); michael@0: switch(GST_EVENT_TYPE(aEvent)) { michael@0: case GST_EVENT_NEWSEGMENT: michael@0: { michael@0: gboolean update; michael@0: gdouble rate; michael@0: GstFormat format; michael@0: gint64 start, stop, position; michael@0: GstSegment* segment; michael@0: michael@0: /* Store the segments so we can convert timestamps to stream time, which michael@0: * is what the upper layers sync on. 
gboolean GStreamerReader::EventProbe(GstPad* aPad, GstEvent* aEvent)
{
  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));
  switch(GST_EVENT_TYPE(aEvent)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate;
      GstFormat format;
      gint64 start, stop, position;
      GstSegment* segment;

      /* Store the segments so we can convert timestamps to stream time, which
       * is what the upper layers sync on.
       */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
      gst_event_parse_new_segment(aEvent, &update, &rate, &format,
                                  &start, &stop, &position);
      if (parent == GST_ELEMENT(mVideoAppSink))
        segment = &mVideoSegment;
      else
        segment = &mAudioSegment;
      gst_segment_set_newsegment(segment, update, rate, format,
                                 start, stop, position);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Reset on seeks */
      ResetDecode();
      break;
    default:
      break;
  }
  gst_object_unref(parent);

  return TRUE;
}

gboolean GStreamerReader::EventProbeCb(GstPad* aPad,
                                       GstEvent* aEvent,
                                       gpointer aUserData)
{
  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  return reader->EventProbe(aPad, aEvent);
}

nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
{
  if (!GST_IS_MOZ_VIDEO_BUFFER(aBuffer))
    return nullptr;

  nsRefPtr<PlanarYCbCrImage> image;
  GstMozVideoBufferData* bufferdata = reinterpret_cast<GstMozVideoBufferData*>(
    gst_moz_video_buffer_get_data(GST_MOZ_VIDEO_BUFFER(aBuffer)));
  image = bufferdata->mImage;

  PlanarYCbCrImage::Data data;
  data.mPicX = data.mPicY = 0;
  data.mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  data.mStereoMode = StereoMode::MONO;

  data.mYChannel = GST_BUFFER_DATA(aBuffer);
  data.mYStride = gst_video_format_get_row_stride(mFormat, 0, mPicture.width);
  data.mYSize = gfx::IntSize(data.mYStride,
      gst_video_format_get_component_height(mFormat, 0, mPicture.height));
  data.mYSkip = 0;
  data.mCbCrStride = gst_video_format_get_row_stride(mFormat, 1, mPicture.width);
  data.mCbCrSize = gfx::IntSize(data.mCbCrStride,
      gst_video_format_get_component_height(mFormat, 1, mPicture.height));
  data.mCbChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 1,
      mPicture.width, mPicture.height);
  data.mCrChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 2,
      mPicture.width, mPicture.height);
  data.mCbSkip = 0;
  data.mCrSkip = 0;

  image->SetDataNoCopy(data);

  return image;
}

void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage>& aImage)
{
  AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(aBuffer),
                          GST_BUFFER_SIZE(aBuffer), nullptr, aOutBuffer, aImage);

  gst_buffer_copy_metadata(*aOutBuffer, aBuffer, (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
  memcpy(GST_BUFFER_DATA(*aOutBuffer), GST_BUFFER_DATA(aBuffer), GST_BUFFER_SIZE(*aOutBuffer));

  aImage = GetImageFromBuffer(*aOutBuffer);
}
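/* For reference, on a little-endian float build BuildAudioSinkCaps() below
 * should yield caps roughly equivalent to the string
 *
 *   audio/x-raw-float, channels=(int){ 1, 2 }, width=(int)32, endianness=(int)1234
 *
 * i.e. mono or stereo native-endian 32-bit float samples, in GStreamer 0.10
 * raw audio caps form.
 */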
GstCaps* GStreamerReader::BuildAudioSinkCaps()
{
  GstCaps* caps;
#if MOZ_LITTLE_ENDIAN
  int endianness = 1234;
#else
  int endianness = 4321;
#endif
  gint width;
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  caps = gst_caps_from_string("audio/x-raw-float, channels={1,2}");
  width = 32;
#else /* !MOZ_SAMPLE_TYPE_FLOAT32 */
  caps = gst_caps_from_string("audio/x-raw-int, channels={1,2}");
  width = 16;
#endif
  gst_caps_set_simple(caps,
                      "width", G_TYPE_INT, width,
                      "endianness", G_TYPE_INT, endianness,
                      NULL);

  return caps;
}

void GStreamerReader::InstallPadCallbacks()
{
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);

  gst_pad_set_bufferalloc_function(sinkpad, GStreamerReader::AllocateVideoBufferCb);
  gst_pad_set_element_private(sinkpad, this);
  gst_object_unref(sinkpad);

  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);
  gst_object_unref(sinkpad);
}
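/* Note: gst_pad_add_event_probe() and the pad bufferalloc function installed
 * above are GStreamer 0.10 APIs; GStreamer 1.0 replaced them with
 * gst_pad_add_probe() and GstBufferPool respectively, which suggests this
 * code only builds against the 0.10 series.
 */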