content/media/gstreamer/GStreamerReader.h

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Sat, 03 Jan 2015 20:18:00 +0100
branch       TOR_BUG_3246
changeset    7:129ffea94266
permissions  -rw-r--r--

Conditionally enable double-key logic based on private browsing mode or the
privacy.thirdparty.isolate preference, and implement it in GetCookieStringCommon
and FindCookie where it counts. Some reservations remain about how to convince
FindCookie callers to test the condition and pass a nullptr when double-key
logic is disabled.

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#if !defined(GStreamerReader_h_)
#define GStreamerReader_h_

#include <map>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
// This include trips -Wreserved-user-defined-literal on clang. Ignoring it
// trips -Wpragmas on GCC (unknown warning), but ignoring that trips
// -Wunknown-pragmas on clang (unknown pragma).
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wreserved-user-defined-literal"
#include <gst/video/video.h>
#pragma GCC diagnostic pop

#include "MediaDecoderReader.h"
#include "MP3FrameParser.h"
#include "ImageContainer.h"
#include "nsRect.h"

namespace mozilla {

namespace dom {
class TimeRanges;
}

class AbstractMediaDecoder;

class GStreamerReader : public MediaDecoderReader
{
  typedef gfx::IntRect IntRect;

public:
  GStreamerReader(AbstractMediaDecoder* aDecoder);
  virtual ~GStreamerReader();

  virtual nsresult Init(MediaDecoderReader* aCloneDonor);
  virtual nsresult ResetDecode();
  virtual bool DecodeAudioData();
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold);
  virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                MetadataTags** aTags);
  virtual nsresult Seek(int64_t aTime,
                        int64_t aStartTime,
                        int64_t aEndTime,
                        int64_t aCurrentTime);
  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);

  virtual void NotifyDataArrived(const char *aBuffer,
                                 uint32_t aLength,
                                 int64_t aOffset) MOZ_OVERRIDE;

  virtual bool HasAudio() {
    return mInfo.HasAudio();
  }

  virtual bool HasVideo() {
    return mInfo.HasVideo();
  }

  layers::ImageContainer* GetImageContainer() { return mDecoder->GetImageContainer(); }

private:

  void ReadAndPushData(guint aLength);
  nsRefPtr<layers::PlanarYCbCrImage> GetImageFromBuffer(GstBuffer* aBuffer);
  void CopyIntoImageBuffer(GstBuffer *aBuffer, GstBuffer** aOutBuffer, nsRefPtr<layers::PlanarYCbCrImage> &image);
  GstCaps* BuildAudioSinkCaps();
  void InstallPadCallbacks();

#if GST_VERSION_MAJOR >= 1
  void ImageDataFromVideoFrame(GstVideoFrame *aFrame, layers::PlanarYCbCrImage::Data *aData);
#endif

  /* Called once the pipeline is setup to check that the stream only contains
   * supported formats
   */
  nsresult CheckSupportedFormats();

  /* Gst callbacks */

  static GstBusSyncReply ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData);
  GstBusSyncReply Error(GstBus *aBus, GstMessage *aMessage);
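
  /* Illustrative sketch only, an assumption about wiring rather than code from
   * this backend's .cpp: a sync bus handler like ErrorCb is normally attached
   * to the pipeline bus with the reader instance passed as user data, so the
   * static trampoline can forward into Error().
   *
   *   // hypothetical snippet, e.g. during pipeline setup
   * #if GST_VERSION_MAJOR >= 1
   *   gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
   * #else
   *   gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
   * #endif
   */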

  /* Called on the source-setup signal emitted by playbin. Used to
   * configure appsrc.
   */
  static void PlayBinSourceSetupCb(GstElement* aPlayBin,
                                   GParamSpec* pspec,
                                   gpointer aUserData);
  void PlayBinSourceSetup(GstAppSrc* aSource);

  /* Called from appsrc when we need to read more data from the resource */
  static void NeedDataCb(GstAppSrc* aSrc, guint aLength, gpointer aUserData);
  void NeedData(GstAppSrc* aSrc, guint aLength);

  /* Called when appsrc has enough data and we can stop reading */
  static void EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData);
  void EnoughData(GstAppSrc* aSrc);

  /* Called when a seek is issued on the pipeline */
  static gboolean SeekDataCb(GstAppSrc* aSrc,
                             guint64 aOffset,
                             gpointer aUserData);
  gboolean SeekData(GstAppSrc* aSrc, guint64 aOffset);
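
  /* Illustrative sketch only, an assumption rather than code lifted from this
   * backend's .cpp: the three appsrc trampolines above are the kind of
   * callbacks packed into a GstAppSrcCallbacks table (see mSrcCallbacks below)
   * and registered with the reader as user data, which each static callback
   * casts back to the instance.
   *
   *   // hypothetical wiring, e.g. once playbin hands us the appsrc
   *   mSrcCallbacks.need_data = GStreamerReader::NeedDataCb;
   *   mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb;
   *   mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb;
   *   gst_app_src_set_callbacks(mSource, &mSrcCallbacks, this, nullptr);
   *
   *   // and inside a trampoline, recover the instance:
   *   void GStreamerReader::NeedDataCb(GstAppSrc* aSrc, guint aLength, gpointer aUserData)
   *   {
   *     static_cast<GStreamerReader*>(aUserData)->NeedData(aSrc, aLength);
   *   }
   */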

  /* Called when events reach the sinks. See inline comments */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn EventProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn EventProbe(GstPad *aPad, GstEvent *aEvent);
#else
  static gboolean EventProbeCb(GstPad* aPad, GstEvent* aEvent, gpointer aUserData);
  gboolean EventProbe(GstPad* aPad, GstEvent* aEvent);
#endif
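
  /* Illustrative sketch only, an assumption rather than code lifted from this
   * backend's .cpp: an event probe such as EventProbeCb is typically installed
   * on a sink pad (for instance from InstallPadCallbacks()) with the reader as
   * user data.
   *
   *   // hypothetical snippet, aSinkPad being e.g. the video sink's sink pad
   * #if GST_VERSION_MAJOR == 1
   *   gst_pad_add_probe(aSinkPad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
   *                     GStreamerReader::EventProbeCb, this, nullptr);
   * #else
   *   gst_pad_add_event_probe(aSinkPad, G_CALLBACK(GStreamerReader::EventProbeCb), this);
   * #endif
   */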

  /* Called when the video part of the pipeline allocates buffers. Used to
   * provide PlanarYCbCrImage backed GstBuffers to the pipeline so that a memory
   * copy can be avoided when handling YUV buffers from the pipeline to the gfx
   * side.
   */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn QueryProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn QueryProbe(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
#else
  static GstFlowReturn AllocateVideoBufferCb(GstPad* aPad, guint64 aOffset, guint aSize,
                                             GstCaps* aCaps, GstBuffer** aBuf);
  GstFlowReturn AllocateVideoBufferFull(GstPad* aPad, guint64 aOffset, guint aSize,
                                        GstCaps* aCaps, GstBuffer** aBuf, nsRefPtr<layers::PlanarYCbCrImage>& aImage);
  GstFlowReturn AllocateVideoBuffer(GstPad* aPad, guint64 aOffset, guint aSize,
                                    GstCaps* aCaps, GstBuffer** aBuf);
#endif


  /* Called when the pipeline is prerolled, that is when at start or after a
   * seek, the first audio and video buffers are queued in the sinks.
   */
  static GstFlowReturn NewPrerollCb(GstAppSink* aSink, gpointer aUserData);
  void VideoPreroll();
  void AudioPreroll();

  /* Called when buffers reach the sinks */
  static GstFlowReturn NewBufferCb(GstAppSink* aSink, gpointer aUserData);
  void NewVideoBuffer();
  void NewAudioBuffer();

  /* Called at end of stream, when decoding has finished */
  static void EosCb(GstAppSink* aSink, gpointer aUserData);
  /* Notifies that a sink will no longer receive any more data. If nullptr
   * is passed to this, we'll assume all streams have reached EOS (for example
   * an error has occurred). */
  void Eos(GstAppSink* aSink = nullptr);
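
  /* Illustrative sketch only, an assumption rather than code lifted from this
   * backend's .cpp: the preroll, new-buffer and EOS trampolines above are the
   * kind of callbacks packed into a GstAppSinkCallbacks table (see
   * mSinkCallbacks below) and registered on each app sink with the reader as
   * user data. Note that the struct field is named new_buffer in GStreamer
   * 0.10 and new_sample in 1.0.
   *
   *   // hypothetical wiring for the video sink
   *   mSinkCallbacks.eos = GStreamerReader::EosCb;
   *   mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
   * #if GST_VERSION_MAJOR == 1
   *   mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb;
   * #else
   *   mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb;
   * #endif
   *   gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks, this, nullptr);
   */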

  /* Called when an element is added inside playbin. We use it to find the
   * decodebin instance.
   */
  static void PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
                                 gpointer *aUserData);

  /* Called during decoding, to decide whether a (sub)stream should be decoded or
   * ignored */
  static bool ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps);

  /* Called by decodebin during autoplugging. We use it to apply our
   * container/codec whitelist.
   */
  static GValueArray* AutoplugSortCb(GstElement* aElement,
                                     GstPad* aPad, GstCaps* aCaps,
                                     GValueArray* aFactories);
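
  /* Illustrative sketch only, an assumption rather than code lifted from this
   * backend's .cpp: once the decodebin instance has been located (see
   * PlayElementAddedCb above), a whitelist callback like AutoplugSortCb is
   * typically hooked up through decodebin's "autoplug-sort" signal.
   *
   *   // hypothetical snippet, aDecodebin being the element found above;
   *   // no user data is passed since AutoplugSortCb takes none
   *   g_signal_connect(aDecodebin, "autoplug-sort",
   *                    G_CALLBACK(GStreamerReader::AutoplugSortCb), nullptr);
   */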

  // Try to find MP3 headers in this stream using our MP3 frame parser.
  nsresult ParseMP3Headers();

  // Get the length of the stream, excluding any metadata we have ignored at the
  // start of the stream: ID3 headers, for example.
  int64_t GetDataLength();

  // Use our own MP3 parser here, largely for consistency with other platforms.
  MP3FrameParser mMP3FrameParser;

  // The byte position in the stream where the actual media (ignoring, for
  // example, ID3 tags) starts.
  uint64_t mDataOffset;

  // We want to be able to decide in |ReadMetadata| whether or not we use the
  // duration from the MP3 frame parser, as this backend supports more than just
  // MP3. But |NotifyDataArrived| can update the duration and is often called
  // _before_ |ReadMetadata|. This flag stops the former from using the parser
  // duration until we are sure we want to.
  bool mUseParserDuration;
  int64_t mLastParserDuration;

#if GST_VERSION_MAJOR >= 1
  GstAllocator *mAllocator;
  GstBufferPool *mBufferPool;
  GstVideoInfo mVideoInfo;
#endif
  GstElement* mPlayBin;
  GstBus* mBus;
  GstAppSrc* mSource;
  /* video sink bin */
  GstElement* mVideoSink;
  /* the actual video app sink */
  GstAppSink* mVideoAppSink;
  /* audio sink bin */
  GstElement* mAudioSink;
  /* the actual audio app sink */
  GstAppSink* mAudioAppSink;
  GstVideoFormat mFormat;
  IntRect mPicture;
  int mVideoSinkBufferCount;
  int mAudioSinkBufferCount;
  GstAppSrcCallbacks mSrcCallbacks;
  GstAppSinkCallbacks mSinkCallbacks;
  /* monitor used to synchronize access to shared state between gstreamer
   * threads and other gecko threads */
  ReentrantMonitor mGstThreadsMonitor;
  /* video and audio segments we use to convert absolute timestamps to [0,
   * stream_duration]. They're set when the pipeline is started or after a seek.
   * Concurrent access guarded with mGstThreadsMonitor.
   */
  GstSegment mVideoSegment;
  GstSegment mAudioSegment;
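
  /* Illustrative sketch only, an assumption about usage rather than code from
   * this file: with mGstThreadsMonitor guarding the segments, code on Gecko
   * threads would hold the monitor for the duration of the access, e.g.:
   *
   *   ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   *   // aTimestamp is a hypothetical absolute buffer timestamp
   *   guint64 streamTime = gst_segment_to_stream_time(&mVideoSegment,
   *                                                   GST_FORMAT_TIME, aTimestamp);
   */
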
  /* bool used to signal when gst has detected the end of stream and
   * DecodeAudioData and DecodeVideoFrame should not expect any more data
   */
  bool mReachedAudioEos;
  bool mReachedVideoEos;
#if GST_VERSION_MAJOR >= 1
  bool mConfigureAlignment;
#endif
  int fpsNum;
  int fpsDen;
};

} // namespace mozilla

#endif
