content/media/gstreamer/GStreamerReader-0.10.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Tue, 06 Jan 2015 21:39:09 +0100
branch       TOR_BUG_9701
changeset    8:97036ab72558
permissions  -rw-r--r--

Conditionally force memory storage according to privacy.thirdparty.isolate.
This solves Tor bug #9701, complying with the disk-avoidance requirement
documented in https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
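
The patch hunks themselves are not part of this listing; a minimal sketch of the
kind of pref gate the commit message describes might look like the following,
assuming the pref is read as a boolean (the helper name and its call sites are
hypothetical; only the mozilla::Preferences API is taken from the file below):

#include "mozilla/Preferences.h"

// Hypothetical helper: when privacy.thirdparty.isolate requests disk
// avoidance, callers would fall back to memory-backed media storage
// instead of writing cache data to disk.
static bool ShouldForceMemoryStorage()
{
  // Default to false so builds without the pref keep current behaviour.
  return mozilla::Preferences::GetBool("privacy.thirdparty.isolate", false);
}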

#include "nsError.h"
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
#include "GStreamerReader.h"
#include "GStreamerMozVideoBuffer.h"
#include "GStreamerFormatHelper.h"
#include "VideoUtils.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Endian.h"
#include "mozilla/Preferences.h"

using namespace mozilla;
using mozilla::layers::PlanarYCbCrImage;
using mozilla::layers::ImageContainer;

GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad,
                                                     guint64 aOffset,
                                                     guint aSize,
                                                     GstCaps* aCaps,
                                                     GstBuffer** aBuf)
{
  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad));
  return reader->AllocateVideoBuffer(aPad, aOffset, aSize, aCaps, aBuf);
}

GstFlowReturn GStreamerReader::AllocateVideoBuffer(GstPad* aPad,
                                                   guint64 aOffset,
                                                   guint aSize,
                                                   GstCaps* aCaps,
                                                   GstBuffer** aBuf)
{
  nsRefPtr<PlanarYCbCrImage> image;
  return AllocateVideoBufferFull(aPad, aOffset, aSize, aCaps, aBuf, image);
}

GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
                                                       guint64 aOffset,
                                                       guint aSize,
                                                       GstCaps* aCaps,
                                                       GstBuffer** aBuf,
                                                       nsRefPtr<PlanarYCbCrImage>& aImage)
{
  /* allocate an image using the container */
  ImageContainer* container = mDecoder->GetImageContainer();
  if (container == nullptr) {
    return GST_FLOW_ERROR;
  }
  nsRefPtr<PlanarYCbCrImage> image =
    container->CreateImage(ImageFormat::PLANAR_YCBCR).downcast<PlanarYCbCrImage>();

  /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
  GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new());
  GST_BUFFER_SIZE(buf) = aSize;
  /* allocate the actual YUV buffer */
  GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);

  aImage = image;

  /* create a GstMozVideoBufferData to hold the image */
  GstMozVideoBufferData* bufferdata = new GstMozVideoBufferData(image);

  /* Attach bufferdata to our GstMozVideoBuffer, it will take care to free it */
  gst_moz_video_buffer_set_data(GST_MOZ_VIDEO_BUFFER(buf), bufferdata);

  *aBuf = buf;
  return GST_FLOW_OK;
}

gboolean GStreamerReader::EventProbe(GstPad* aPad, GstEvent* aEvent)
{
  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));
  switch(GST_EVENT_TYPE(aEvent)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate;
      GstFormat format;
      gint64 start, stop, position;
      GstSegment* segment;

      /* Store the segments so we can convert timestamps to stream time, which
       * is what the upper layers sync on.
       */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
      gst_event_parse_new_segment(aEvent, &update, &rate, &format,
                                  &start, &stop, &position);
      if (parent == GST_ELEMENT(mVideoAppSink))
        segment = &mVideoSegment;
      else
        segment = &mAudioSegment;
      gst_segment_set_newsegment(segment, update, rate, format,
                                 start, stop, position);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Reset on seeks */
      ResetDecode();
      break;
    default:
      break;
  }
  gst_object_unref(parent);

  return TRUE;
}

gboolean GStreamerReader::EventProbeCb(GstPad* aPad,
                                       GstEvent* aEvent,
                                       gpointer aUserData)
{
  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  return reader->EventProbe(aPad, aEvent);
}

nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
{
  if (!GST_IS_MOZ_VIDEO_BUFFER (aBuffer))
    return nullptr;

  nsRefPtr<PlanarYCbCrImage> image;
  GstMozVideoBufferData* bufferdata = reinterpret_cast<GstMozVideoBufferData*>(gst_moz_video_buffer_get_data(GST_MOZ_VIDEO_BUFFER(aBuffer)));
  image = bufferdata->mImage;

  PlanarYCbCrImage::Data data;
  data.mPicX = data.mPicY = 0;
  data.mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  data.mStereoMode = StereoMode::MONO;

  data.mYChannel = GST_BUFFER_DATA(aBuffer);
  data.mYStride = gst_video_format_get_row_stride(mFormat, 0, mPicture.width);
  data.mYSize = gfx::IntSize(data.mYStride,
                             gst_video_format_get_component_height(mFormat, 0, mPicture.height));
  data.mYSkip = 0;
  data.mCbCrStride = gst_video_format_get_row_stride(mFormat, 1, mPicture.width);
  data.mCbCrSize = gfx::IntSize(data.mCbCrStride,
                                gst_video_format_get_component_height(mFormat, 1, mPicture.height));
  data.mCbChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 1,
                                                                           mPicture.width, mPicture.height);
  data.mCrChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 2,
                                                                           mPicture.width, mPicture.height);
  data.mCbSkip = 0;
  data.mCrSkip = 0;

  image->SetDataNoCopy(data);

  return image;
}

void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage> &aImage)
{
  AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(aBuffer),
                          GST_BUFFER_SIZE(aBuffer), nullptr, aOutBuffer, aImage);

  gst_buffer_copy_metadata(*aOutBuffer, aBuffer, (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
  memcpy(GST_BUFFER_DATA(*aOutBuffer), GST_BUFFER_DATA(aBuffer), GST_BUFFER_SIZE(*aOutBuffer));

  aImage = GetImageFromBuffer(*aOutBuffer);
}

GstCaps* GStreamerReader::BuildAudioSinkCaps()
{
  GstCaps* caps;
#if MOZ_LITTLE_ENDIAN
  int endianness = 1234;
#else
  int endianness = 4321;
#endif
  gint width;
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  caps = gst_caps_from_string("audio/x-raw-float, channels={1,2}");
  width = 32;
#else /* !MOZ_SAMPLE_TYPE_FLOAT32 */
  caps = gst_caps_from_string("audio/x-raw-int, channels={1,2}");
  width = 16;
#endif
  gst_caps_set_simple(caps,
                      "width", G_TYPE_INT, width,
                      "endianness", G_TYPE_INT, endianness,
                      NULL);

  return caps;
}
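
/* For reference: on a little-endian build the caps assembled above amount to
 * roughly "audio/x-raw-float, channels={1,2}, width=32, endianness=1234" when
 * MOZ_SAMPLE_TYPE_FLOAT32 is defined, and
 * "audio/x-raw-int, channels={1,2}, width=16, endianness=1234" otherwise.
 */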

void GStreamerReader::InstallPadCallbacks()
{
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);

  gst_pad_set_bufferalloc_function(sinkpad, GStreamerReader::AllocateVideoBufferCb);
  gst_pad_set_element_private(sinkpad, this);
  gst_object_unref(sinkpad);

  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);
  gst_object_unref(sinkpad);
}
