Fri, 16 Jan 2015 04:50:19 +0100
Replace accessor implementation with direct member state manipulation, as
requested in https://trac.torproject.org/projects/tor/ticket/9701#comment:32

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#if !defined(GStreamerReader_h_)
#define GStreamerReader_h_

#include <map>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
// This include trips -Wreserved-user-defined-literal on clang. Ignoring it
// trips -Wpragmas on GCC (unknown warning), but ignoring that trips
// -Wunknown-pragmas on clang (unknown pragma).
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wreserved-user-defined-literal"
#include <gst/video/video.h>
#pragma GCC diagnostic pop

#include "MediaDecoderReader.h"
#include "MP3FrameParser.h"
#include "ImageContainer.h"
#include "nsRect.h"

namespace mozilla {

namespace dom {
class TimeRanges;
}

class AbstractMediaDecoder;

class GStreamerReader : public MediaDecoderReader
{
  typedef gfx::IntRect IntRect;

public:
  GStreamerReader(AbstractMediaDecoder* aDecoder);
  virtual ~GStreamerReader();

  virtual nsresult Init(MediaDecoderReader* aCloneDonor);
  virtual nsresult ResetDecode();
  virtual bool DecodeAudioData();
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold);
  virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                MetadataTags** aTags);
  virtual nsresult Seek(int64_t aTime,
                        int64_t aStartTime,
                        int64_t aEndTime,
                        int64_t aCurrentTime);
  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);

  virtual void NotifyDataArrived(const char *aBuffer,
                                 uint32_t aLength,
                                 int64_t aOffset) MOZ_OVERRIDE;

  virtual bool HasAudio() {
    return mInfo.HasAudio();
  }

  virtual bool HasVideo() {
    return mInfo.HasVideo();
  }

  layers::ImageContainer* GetImageContainer() { return mDecoder->GetImageContainer(); }

private:

  void ReadAndPushData(guint aLength);
  nsRefPtr<layers::PlanarYCbCrImage> GetImageFromBuffer(GstBuffer* aBuffer);
  void CopyIntoImageBuffer(GstBuffer *aBuffer, GstBuffer** aOutBuffer, nsRefPtr<layers::PlanarYCbCrImage> &image);
  GstCaps* BuildAudioSinkCaps();
  void InstallPadCallbacks();

#if GST_VERSION_MAJOR >= 1
  void ImageDataFromVideoFrame(GstVideoFrame *aFrame, layers::PlanarYCbCrImage::Data *aData);
#endif

  /* Called once the pipeline is set up, to check that the stream only
   * contains supported formats.
   */
  nsresult CheckSupportedFormats();
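
  /* Hedged sketch (illustrative, not from the original file): one way for
   * CheckSupportedFormats() to see what actually got plugged is to read the
   * negotiated caps off a sink pad, e.g. for the video branch:
   *
   *   GstPad* sinkpad =
   *     gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
   * #if GST_VERSION_MAJOR >= 1
   *   GstCaps* caps = gst_pad_get_current_caps(sinkpad);
   * #else
   *   GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
   * #endif
   *   // ... compare caps against the container/codec whitelist ...
   *   gst_caps_unref(caps);
   *   gst_object_unref(sinkpad);
   *
   * The calls above are standard GStreamer API; the actual checks live in
   * GStreamerReader.cpp and are an assumption here.
   */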

  /* Gst callbacks */

  static GstBusSyncReply ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData);
  GstBusSyncReply Error(GstBus *aBus, GstMessage *aMessage);
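
  /* Hedged sketch (illustrative, not from the original file): ErrorCb matches
   * the GstBusSyncHandler signature, so it would typically be installed on the
   * pipeline bus as a sync handler, roughly:
   *
   * #if GST_VERSION_MAJOR >= 1
   *   gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
   * #else
   *   gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
   * #endif
   *
   * The trailing GDestroyNotify argument only exists in the 1.x API; the call
   * site is an assumption.
   */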

  /* Called on the source-setup signal emitted by playbin. Used to
   * configure appsrc.
   */
  static void PlayBinSourceSetupCb(GstElement* aPlayBin,
                                   GParamSpec* pspec,
                                   gpointer aUserData);
  void PlayBinSourceSetup(GstAppSrc* aSource);
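
  /* Hedged sketch (illustrative, not from the original file): the GParamSpec
   * parameter above is what a GObject property-notify handler receives, so the
   * callback is presumably connected to playbin's "source" property, along the
   * lines of:
   *
   *   g_signal_connect(mPlayBin, "notify::source",
   *                    G_CALLBACK(GStreamerReader::PlayBinSourceSetupCb), this);
   *
   * The exact signal name is an assumption; the authoritative wiring is in
   * GStreamerReader.cpp.
   */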

  /* Called from appsrc when we need to read more data from the resource */
  static void NeedDataCb(GstAppSrc* aSrc, guint aLength, gpointer aUserData);
  void NeedData(GstAppSrc* aSrc, guint aLength);

  /* Called when appsrc has enough data and we can stop reading */
  static void EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData);
  void EnoughData(GstAppSrc* aSrc);

  /* Called when a seek is issued on the pipeline */
  static gboolean SeekDataCb(GstAppSrc* aSrc,
                             guint64 aOffset,
                             gpointer aUserData);
  gboolean SeekData(GstAppSrc* aSrc, guint64 aOffset);
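
  /* Hedged sketch (illustrative, not from the original file): the three appsrc
   * callbacks above are the fields of GstAppSrcCallbacks, so the usual wiring
   * (using the mSrcCallbacks member declared below) looks like:
   *
   *   mSrcCallbacks.need_data = GStreamerReader::NeedDataCb;
   *   mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb;
   *   mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb;
   *   gst_app_src_set_callbacks(mSource, &mSrcCallbacks, this, nullptr);
   *
   * gst_app_src_set_callbacks() is standard appsrc API in both 0.10 and 1.x;
   * the exact call site is an assumption.
   */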

  /* Called when events reach the sinks. See inline comments */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn EventProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn EventProbe(GstPad *aPad, GstEvent *aEvent);
#else
  static gboolean EventProbeCb(GstPad* aPad, GstEvent* aEvent, gpointer aUserData);
  gboolean EventProbe(GstPad* aPad, GstEvent* aEvent);
#endif
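
  /* Hedged sketch (illustrative, not from the original file): a probe with the
   * signatures above would be installed on a sink pad roughly as follows:
   *
   * #if GST_VERSION_MAJOR == 1
   *   gst_pad_add_probe(aPad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
   *                     &GStreamerReader::EventProbeCb, this, nullptr);
   * #else
   *   gst_pad_add_event_probe(aPad, G_CALLBACK(&GStreamerReader::EventProbeCb),
   *                           this);
   * #endif
   *
   * Both calls are standard GStreamer pad-probe API; the probe mask and call
   * site (likely InstallPadCallbacks()) are assumptions.
   */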

  /* Called when the video part of the pipeline allocates buffers. Used to
   * provide PlanarYCbCrImage backed GstBuffers to the pipeline so that a memory
   * copy can be avoided when handling YUV buffers from the pipeline to the gfx
   * side.
   */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn QueryProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn QueryProbe(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
#else
  static GstFlowReturn AllocateVideoBufferCb(GstPad* aPad, guint64 aOffset, guint aSize,
                                             GstCaps* aCaps, GstBuffer** aBuf);
  GstFlowReturn AllocateVideoBufferFull(GstPad* aPad, guint64 aOffset, guint aSize,
                                        GstCaps* aCaps, GstBuffer** aBuf, nsRefPtr<layers::PlanarYCbCrImage>& aImage);
  GstFlowReturn AllocateVideoBuffer(GstPad* aPad, guint64 aOffset, guint aSize,
                                    GstCaps* aCaps, GstBuffer** aBuf);
#endif
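
  /* Hedged sketch (illustrative, not from the original file): in 1.x the
   * allocation path is intercepted with a downstream query probe, while 0.10
   * exposes a bufferalloc hook directly on the pad:
   *
   * #if GST_VERSION_MAJOR == 1
   *   gst_pad_add_probe(aPad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM,
   *                     &GStreamerReader::QueryProbeCb, this, nullptr);
   * #else
   *   gst_pad_set_bufferalloc_function(aPad,
   *                                    GStreamerReader::AllocateVideoBufferCb);
   * #endif
   *
   * Both are standard GStreamer API for their respective versions; where the
   * calls happen is an assumption.
   */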

  /* Called when the pipeline is prerolled, that is, when the first audio and
   * video buffers are queued in the sinks at start or after a seek.
   */
  static GstFlowReturn NewPrerollCb(GstAppSink* aSink, gpointer aUserData);
  void VideoPreroll();
  void AudioPreroll();

  /* Called when buffers reach the sinks */
  static GstFlowReturn NewBufferCb(GstAppSink* aSink, gpointer aUserData);
  void NewVideoBuffer();
  void NewAudioBuffer();

  /* Called at end of stream, when decoding has finished */
  static void EosCb(GstAppSink* aSink, gpointer aUserData);
  /* Notifies that a sink will not receive any more data. If nullptr is passed,
   * we assume all streams have reached EOS (for example, because an error has
   * occurred). */
  void Eos(GstAppSink* aSink = nullptr);
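
  /* Hedged sketch (illustrative, not from the original file): the preroll,
   * new-buffer and EOS handlers above are the fields of GstAppSinkCallbacks
   * (new_sample in 1.x, new_buffer in 0.10), so each sink would be configured
   * roughly as:
   *
   *   mSinkCallbacks.eos = GStreamerReader::EosCb;
   *   mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
   * #if GST_VERSION_MAJOR == 1
   *   mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb;
   * #else
   *   mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb;
   * #endif
   *   gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks, this, nullptr);
   *   gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks, this, nullptr);
   *
   * gst_app_sink_set_callbacks() is standard appsink API; the call site is an
   * assumption.
   */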

  /* Called when an element is added inside playbin. We use it to find the
   * decodebin instance.
   */
  static void PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
                                 gpointer *aUserData);

  /* Called during decoding, to decide whether a (sub)stream should be decoded
   * or ignored */
  static bool ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps);

  /* Called by decodebin during autoplugging. We use it to apply our
   * container/codec whitelist.
   */
  static GValueArray* AutoplugSortCb(GstElement* aElement,
                                     GstPad* aPad, GstCaps* aCaps,
                                     GValueArray* aFactories);
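
  /* Hedged sketch (illustrative, not from the original file): decodebin's
   * "autoplug-sort" signal hands over the candidate factories as a
   * GValueArray; a whitelist filter built on ShouldAutoplugFactory() could
   * look like:
   *
   *   GValueArray* filtered = g_value_array_new(aFactories->n_values);
   *   for (guint i = 0; i < aFactories->n_values; i++) {
   *     GValue* value = g_value_array_get_nth(aFactories, i);
   *     GstElementFactory* factory =
   *       GST_ELEMENT_FACTORY(g_value_get_object(value));
   *     if (ShouldAutoplugFactory(factory, aCaps))
   *       g_value_array_append(filtered, value);
   *   }
   *   return filtered;
   *
   * This follows the documented autoplug-sort contract (return a new, sorted
   * factory array); the exact filtering logic here is an assumption.
   */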

  // Try to find MP3 headers in this stream using our MP3 frame parser.
  nsresult ParseMP3Headers();

  // Get the length of the stream, excluding any metadata we have ignored at
  // the start of the stream: ID3 headers, for example.
  int64_t GetDataLength();

  // Use our own MP3 parser here, largely for consistency with other platforms.
  MP3FrameParser mMP3FrameParser;

  // The byte position in the stream where the actual media (ignoring, for
  // example, ID3 tags) starts.
  uint64_t mDataOffset;

  // We want to be able to decide in |ReadMetadata| whether or not we use the
  // duration from the MP3 frame parser, as this backend supports more than
  // just MP3. But |NotifyDataArrived| can update the duration and is often
  // called _before_ |ReadMetadata|. This flag stops the former from using the
  // parser duration until we are sure we want to.
  bool mUseParserDuration;
  int64_t mLastParserDuration;

#if GST_VERSION_MAJOR >= 1
  GstAllocator *mAllocator;
  GstBufferPool *mBufferPool;
  GstVideoInfo mVideoInfo;
#endif
  GstElement* mPlayBin;
  GstBus* mBus;
  GstAppSrc* mSource;
  /* video sink bin */
  GstElement* mVideoSink;
  /* the actual video app sink */
  GstAppSink* mVideoAppSink;
  /* audio sink bin */
  GstElement* mAudioSink;
  /* the actual audio app sink */
  GstAppSink* mAudioAppSink;
  GstVideoFormat mFormat;
  IntRect mPicture;
  int mVideoSinkBufferCount;
  int mAudioSinkBufferCount;
  GstAppSrcCallbacks mSrcCallbacks;
  GstAppSinkCallbacks mSinkCallbacks;

  /* Monitor used to synchronize access to shared state between GStreamer
   * threads and other Gecko threads. */
  ReentrantMonitor mGstThreadsMonitor;

  /* Video and audio segments we use to convert absolute timestamps to
   * [0, stream_duration]. They're set when the pipeline is started or after a
   * seek. Concurrent access is guarded with mGstThreadsMonitor.
   */
  GstSegment mVideoSegment;
  GstSegment mAudioSegment;
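
  /* Hedged sketch (illustrative, not from the original file): when a decoded
   * buffer comes out of an appsink, its absolute pipeline timestamp is mapped
   * into [0, stream_duration] with the matching segment, roughly:
   *
   *   GstClockTime timestamp;
   * #if GST_VERSION_MAJOR >= 1
   *   timestamp = GST_BUFFER_PTS(buffer);
   * #else
   *   timestamp = GST_BUFFER_TIMESTAMP(buffer);
   * #endif
   *   timestamp = gst_segment_to_stream_time(&mVideoSegment, GST_FORMAT_TIME,
   *                                          timestamp);
   *
   * gst_segment_to_stream_time() is standard GStreamer API; "buffer" is a
   * placeholder for the buffer pulled from the sink.
   */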

  /* Flags used to signal that GStreamer has detected the end of stream, so
   * DecodeAudioData and DecodeVideoFrame should not expect any more data.
   */
  bool mReachedAudioEos;
  bool mReachedVideoEos;
#if GST_VERSION_MAJOR >= 1
  bool mConfigureAlignment;
#endif
  int fpsNum;
  int fpsDen;
};

} // namespace mozilla

#endif