#include "nsError.h"
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
#include "GStreamerReader.h"
#include "GStreamerMozVideoBuffer.h"
#include "GStreamerFormatHelper.h"
#include "VideoUtils.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Endian.h"
#include "mozilla/Preferences.h"

using namespace mozilla;
using mozilla::layers::PlanarYCbCrImage;
using mozilla::layers::ImageContainer;
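
/* Static C-style callback installed as the video sink pad's bufferalloc
 * function (see InstallPadCallbacks below). Recovers the GStreamerReader
 * instance stored as the pad's element-private data and forwards the call.
 */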
GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad,
                                                     guint64 aOffset,
                                                     guint aSize,
                                                     GstCaps* aCaps,
                                                     GstBuffer** aBuf)
{
  GStreamerReader* reader =
    reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad));
  return reader->AllocateVideoBuffer(aPad, aOffset, aSize, aCaps, aBuf);
}
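
/* Allocate a video buffer backed by a PlanarYCbCrImage. The temporary image
 * reference is dropped on return; the GstMozVideoBufferData attached to the
 * buffer keeps the image alive for the buffer's lifetime.
 */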
GstFlowReturn GStreamerReader::AllocateVideoBuffer(GstPad* aPad,
                                                   guint64 aOffset,
                                                   guint aSize,
                                                   GstCaps* aCaps,
                                                   GstBuffer** aBuf)
{
  nsRefPtr<PlanarYCbCrImage> image;
  return AllocateVideoBufferFull(aPad, aOffset, aSize, aCaps, aBuf, image);
}
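
/* Allocate a GstBuffer whose data pointer refers to memory owned by a freshly
 * created PlanarYCbCrImage, so the decoder writes frames straight into an
 * image we can hand to the layers system without an extra copy. The image is
 * returned through aImage and also attached to the buffer, which owns it.
 */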
GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
                                                       guint64 aOffset,
                                                       guint aSize,
                                                       GstCaps* aCaps,
                                                       GstBuffer** aBuf,
                                                       nsRefPtr<PlanarYCbCrImage>& aImage)
{
  /* allocate an image using the container */
  ImageContainer* container = mDecoder->GetImageContainer();
  if (container == nullptr) {
    return GST_FLOW_ERROR;
  }
  nsRefPtr<PlanarYCbCrImage> image =
    container->CreateImage(ImageFormat::PLANAR_YCBCR).downcast<PlanarYCbCrImage>();

  /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
  GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new());
  GST_BUFFER_SIZE(buf) = aSize;
  /* allocate the actual YUV buffer */
  GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);

  aImage = image;

  /* create a GstMozVideoBufferData to hold the image */
  GstMozVideoBufferData* bufferdata = new GstMozVideoBufferData(image);

  /* Attach bufferdata to our GstMozVideoBuffer; it will take care of freeing it */
  gst_moz_video_buffer_set_data(GST_MOZ_VIDEO_BUFFER(buf), bufferdata);

  *aBuf = buf;
  return GST_FLOW_OK;
}
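
/* Pad event probe for the audio and video appsink sink pads. Newsegment
 * events are recorded into the matching segment (under mGstThreadsMonitor)
 * so buffer timestamps can later be converted to stream time; flush-stop
 * events, emitted on seeks, reset the decode state. Returning TRUE keeps
 * the event flowing downstream.
 */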
gboolean GStreamerReader::EventProbe(GstPad* aPad, GstEvent* aEvent)
{
  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));
  switch(GST_EVENT_TYPE(aEvent)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate;
      GstFormat format;
      gint64 start, stop, position;
      GstSegment* segment;

      /* Store the segments so we can convert timestamps to stream time, which
       * is what the upper layers sync on.
       */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
      gst_event_parse_new_segment(aEvent, &update, &rate, &format,
                                  &start, &stop, &position);
      if (parent == GST_ELEMENT(mVideoAppSink))
        segment = &mVideoSegment;
      else
        segment = &mAudioSegment;
      gst_segment_set_newsegment(segment, update, rate, format,
                                 start, stop, position);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Reset on seeks */
      ResetDecode();
      break;
    default:
      break;
  }
  gst_object_unref(parent);

  return TRUE;
}
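
/* Static trampoline passed to gst_pad_add_event_probe(); aUserData carries
 * the GStreamerReader instance. */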
gboolean GStreamerReader::EventProbeCb(GstPad* aPad,
                                       GstEvent* aEvent,
                                       gpointer aUserData)
{
  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  return reader->EventProbe(aPad, aEvent);
}
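
/* Recover the PlanarYCbCrImage attached to a buffer we allocated, and
 * describe its plane layout (strides, component heights, chroma offsets)
 * from the negotiated format and picture size. SetDataNoCopy() points the
 * image at the buffer memory rather than copying it. Returns nullptr for
 * buffers that are not GstMozVideoBuffers, i.e. buffers we did not allocate.
 */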
nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
{
  if (!GST_IS_MOZ_VIDEO_BUFFER(aBuffer))
    return nullptr;

  nsRefPtr<PlanarYCbCrImage> image;
  GstMozVideoBufferData* bufferdata = reinterpret_cast<GstMozVideoBufferData*>(
    gst_moz_video_buffer_get_data(GST_MOZ_VIDEO_BUFFER(aBuffer)));
  image = bufferdata->mImage;

  PlanarYCbCrImage::Data data;
  data.mPicX = data.mPicY = 0;
  data.mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  data.mStereoMode = StereoMode::MONO;

  data.mYChannel = GST_BUFFER_DATA(aBuffer);
  data.mYStride = gst_video_format_get_row_stride(mFormat, 0, mPicture.width);
  data.mYSize = gfx::IntSize(data.mYStride,
      gst_video_format_get_component_height(mFormat, 0, mPicture.height));
  data.mYSkip = 0;
  data.mCbCrStride = gst_video_format_get_row_stride(mFormat, 1, mPicture.width);
  data.mCbCrSize = gfx::IntSize(data.mCbCrStride,
      gst_video_format_get_component_height(mFormat, 1, mPicture.height));
  data.mCbChannel = data.mYChannel +
    gst_video_format_get_component_offset(mFormat, 1, mPicture.width, mPicture.height);
  data.mCrChannel = data.mYChannel +
    gst_video_format_get_component_offset(mFormat, 2, mPicture.width, mPicture.height);
  data.mCbSkip = 0;
  data.mCrSkip = 0;

  image->SetDataNoCopy(data);

  return image;
}
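
/* Fallback for buffers that were not allocated through AllocateVideoBuffer():
 * allocate a new image-backed buffer, copy the metadata and raw data across,
 * and return the wrapped image through aImage.
 */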
void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage>& aImage)
{
  AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(aBuffer),
                          GST_BUFFER_SIZE(aBuffer), nullptr, aOutBuffer, aImage);

  gst_buffer_copy_metadata(*aOutBuffer, aBuffer, (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
  memcpy(GST_BUFFER_DATA(*aOutBuffer), GST_BUFFER_DATA(aBuffer), GST_BUFFER_SIZE(*aOutBuffer));

  aImage = GetImageFromBuffer(*aOutBuffer);
}
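
/* Build the caps accepted by the audio sink: mono or stereo samples in the
 * host's native format, i.e. 32-bit floats when MOZ_SAMPLE_TYPE_FLOAT32 is
 * defined and 16-bit integers otherwise, with matching endianness (in
 * GStreamer 0.10 caps, 1234 means little-endian and 4321 big-endian).
 */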
GstCaps* GStreamerReader::BuildAudioSinkCaps()
{
  GstCaps* caps;
#if MOZ_LITTLE_ENDIAN
  int endianness = 1234;
#else
  int endianness = 4321;
#endif
  gint width;
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  caps = gst_caps_from_string("audio/x-raw-float, channels={1,2}");
  width = 32;
#else /* !MOZ_SAMPLE_TYPE_FLOAT32 */
  caps = gst_caps_from_string("audio/x-raw-int, channels={1,2}");
  width = 16;
#endif
  gst_caps_set_simple(caps,
                      "width", G_TYPE_INT, width,
                      "endianness", G_TYPE_INT, endianness,
                      NULL);

  return caps;
}
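
/* Install the event probe on both appsink sink pads and the bufferalloc
 * function on the video pad; `this` is stored as the video pad's
 * element-private data so AllocateVideoBufferCb() can recover the reader.
 */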
void GStreamerReader::InstallPadCallbacks()
{
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);

  gst_pad_set_bufferalloc_function(sinkpad, GStreamerReader::AllocateVideoBufferCb);
  gst_pad_set_element_private(sinkpad, this);
  gst_object_unref(sinkpad);

  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);
  gst_object_unref(sinkpad);
}