content/media/gstreamer/GStreamerReader.h

changeset 6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/content/media/gstreamer/GStreamerReader.h	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,245 @@
     1.4 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.5 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.6 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.7 +
     1.8 +#if !defined(GStreamerReader_h_)
     1.9 +#define GStreamerReader_h_
    1.10 +
    1.11 +#include <map>
    1.12 +
    1.13 +#include <gst/gst.h>
    1.14 +#include <gst/app/gstappsrc.h>
    1.15 +#include <gst/app/gstappsink.h>
    1.16 +// This include trips -Wreserved-user-defined-literal on clang. Ignoring it
    1.17 +// trips -Wpragmas on GCC (unknown warning), but ignoring that trips
    1.18 +// -Wunknown-pragmas on clang (unknown pragma).
    1.19 +#pragma GCC diagnostic push
    1.20 +#pragma GCC diagnostic ignored "-Wunknown-pragmas"
    1.21 +#pragma GCC diagnostic ignored "-Wpragmas"
    1.22 +#pragma GCC diagnostic ignored "-Wreserved-user-defined-literal"
    1.23 +#include <gst/video/video.h>
    1.24 +#pragma GCC diagnostic pop
    1.25 +
    1.26 +#include "MediaDecoderReader.h"
    1.27 +#include "MP3FrameParser.h"
    1.28 +#include "ImageContainer.h"
    1.29 +#include "nsRect.h"
    1.30 +
    1.31 +namespace mozilla {
    1.32 +
    1.33 +namespace dom {
    1.34 +class TimeRanges;
    1.35 +}
    1.36 +
    1.37 +class AbstractMediaDecoder;
    1.38 +
    1.39 +class GStreamerReader : public MediaDecoderReader
    1.40 +{
    1.41 +  typedef gfx::IntRect IntRect;
    1.42 +
    1.43 +public:
    1.44 +  GStreamerReader(AbstractMediaDecoder* aDecoder);
    1.45 +  virtual ~GStreamerReader();
    1.46 +
    1.47 +  virtual nsresult Init(MediaDecoderReader* aCloneDonor);
    1.48 +  virtual nsresult ResetDecode();
    1.49 +  virtual bool DecodeAudioData();
    1.50 +  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
    1.51 +                                int64_t aTimeThreshold);
    1.52 +  virtual nsresult ReadMetadata(MediaInfo* aInfo,
    1.53 +                                MetadataTags** aTags);
    1.54 +  virtual nsresult Seek(int64_t aTime,
    1.55 +                        int64_t aStartTime,
    1.56 +                        int64_t aEndTime,
    1.57 +                        int64_t aCurrentTime);
    1.58 +  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
    1.59 +
    1.60 +  virtual void NotifyDataArrived(const char *aBuffer,
    1.61 +                                 uint32_t aLength,
    1.62 +                                 int64_t aOffset) MOZ_OVERRIDE;
    1.63 +
    1.64 +  virtual bool HasAudio() {
    1.65 +    return mInfo.HasAudio();
    1.66 +  }
    1.67 +
    1.68 +  virtual bool HasVideo() {
    1.69 +    return mInfo.HasVideo();
    1.70 +  }
    1.71 +
    1.72 +  layers::ImageContainer* GetImageContainer() { return mDecoder->GetImageContainer(); }
    1.73 +
    1.74 +private:
    1.75 +
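         +  /* Reads aLength bytes from the media resource and pushes them into mSource */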
    1.76 +  void ReadAndPushData(guint aLength);
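         +  /* Returns the PlanarYCbCrImage backing aBuffer, if any */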
    1.77 +  nsRefPtr<layers::PlanarYCbCrImage> GetImageFromBuffer(GstBuffer* aBuffer);
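         +  /* Copies aBuffer into a new image-backed buffer returned in aOutBuffer */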
    1.78 +  void CopyIntoImageBuffer(GstBuffer *aBuffer, GstBuffer** aOutBuffer, nsRefPtr<layers::PlanarYCbCrImage> &image);
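         +  /* Builds the caps describing the raw audio formats the audio sink accepts */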
    1.79 +  GstCaps* BuildAudioSinkCaps();
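         +  /* Installs the pad probes (see the callbacks below) on the sink pads */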
    1.80 +  void InstallPadCallbacks();
    1.81 +
    1.82 +#if GST_VERSION_MAJOR >= 1
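         +  /* Fills aData with the plane pointers and strides of a mapped GstVideoFrame */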
    1.83 +  void ImageDataFromVideoFrame(GstVideoFrame *aFrame, layers::PlanarYCbCrImage::Data *aData);
    1.84 +#endif
    1.85 +
    1.86 +  /* Called once the pipeline is set up, to check that the stream only
    1.87 +   * contains supported formats.
    1.88 +   */
    1.89 +  nsresult CheckSupportedFormats();
    1.90 +
    1.91 +  /* Gst callbacks */
    1.92 +
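         +  /* Sync handler installed on the pipeline bus, used to catch error messages */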
    1.93 +  static GstBusSyncReply ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData);
    1.94 +  GstBusSyncReply Error(GstBus *aBus, GstMessage *aMessage);
    1.95 +
    1.96 +  /* Called on the source-setup signal emitted by playbin. Used to
    1.97 +   * configure appsrc.
    1.98 +   */
    1.99 +  static void PlayBinSourceSetupCb(GstElement* aPlayBin,
   1.100 +                                   GParamSpec* pspec,
   1.101 +                                   gpointer aUserData);
   1.102 +  void PlayBinSourceSetup(GstAppSrc* aSource);
   1.103 +
   1.104 +  /* Called from appsrc when we need to read more data from the resource */
   1.105 +  static void NeedDataCb(GstAppSrc* aSrc, guint aLength, gpointer aUserData);
   1.106 +  void NeedData(GstAppSrc* aSrc, guint aLength);
   1.107 +
   1.108 +  /* Called when appsrc has enough data and we can stop reading */
   1.109 +  static void EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData);
   1.110 +  void EnoughData(GstAppSrc* aSrc);
   1.111 +
   1.112 +  /* Called when a seek is issued on the pipeline */
   1.113 +  static gboolean SeekDataCb(GstAppSrc* aSrc,
   1.114 +                             guint64 aOffset,
   1.115 +                             gpointer aUserData);
   1.116 +  gboolean SeekData(GstAppSrc* aSrc, guint64 aOffset);
   1.117 +
   1.118 +  /* Called when events reach the sinks. See inline comments */
   1.119 +#if GST_VERSION_MAJOR == 1
   1.120 +  static GstPadProbeReturn EventProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
   1.121 +  GstPadProbeReturn EventProbe(GstPad *aPad, GstEvent *aEvent);
   1.122 +#else
   1.123 +  static gboolean EventProbeCb(GstPad* aPad, GstEvent* aEvent, gpointer aUserData);
   1.124 +  gboolean EventProbe(GstPad* aPad, GstEvent* aEvent);
   1.125 +#endif
   1.126 +
   1.127 +  /* Called when the video part of the pipeline allocates buffers. Used to
   1.128 +   * provide PlanarYCbCrImage-backed GstBuffers to the pipeline so that a
   1.129 +   * memory copy can be avoided when passing YUV buffers from the pipeline to
   1.130 +   * the gfx side.
   1.131 +   */
   1.132 +#if GST_VERSION_MAJOR == 1
   1.133 +  static GstPadProbeReturn QueryProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
   1.134 +  GstPadProbeReturn QueryProbe(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
   1.135 +#else
   1.136 +  static GstFlowReturn AllocateVideoBufferCb(GstPad* aPad, guint64 aOffset, guint aSize,
   1.137 +                                             GstCaps* aCaps, GstBuffer** aBuf);
   1.138 +  GstFlowReturn AllocateVideoBufferFull(GstPad* aPad, guint64 aOffset, guint aSize,
   1.139 +                                     GstCaps* aCaps, GstBuffer** aBuf, nsRefPtr<layers::PlanarYCbCrImage>& aImage);
   1.140 +  GstFlowReturn AllocateVideoBuffer(GstPad* aPad, guint64 aOffset, guint aSize,
   1.141 +                                     GstCaps* aCaps, GstBuffer** aBuf);
   1.142 +#endif
   1.143 +
   1.144 +
   1.145 +  /* Called when the pipeline is prerolled, that is when, at startup or after
   1.146 +   * a seek, the first audio and video buffers are queued in the sinks.
   1.147 +   */
   1.148 +  static GstFlowReturn NewPrerollCb(GstAppSink* aSink, gpointer aUserData);
   1.149 +  void VideoPreroll();
   1.150 +  void AudioPreroll();
   1.151 +
   1.152 +  /* Called when buffers reach the sinks */
   1.153 +  static GstFlowReturn NewBufferCb(GstAppSink* aSink, gpointer aUserData);
   1.154 +  void NewVideoBuffer();
   1.155 +  void NewAudioBuffer();
   1.156 +
   1.157 +  /* Called at end of stream, when decoding has finished */
   1.158 +  static void EosCb(GstAppSink* aSink, gpointer aUserData);
   1.159 +  /* Notifies that a sink will not receive any more data. If nullptr is
   1.160 +   * passed, we assume all streams have reached EOS (for example because an
   1.161 +   * error has occurred). */
   1.162 +  void Eos(GstAppSink* aSink = nullptr);
   1.163 +
   1.164 +  /* Called when an element is added inside playbin. We use it to find the
   1.165 +   * decodebin instance.
   1.166 +   */
   1.167 +  static void PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
   1.168 +                                 gpointer *aUserData);
   1.169 +
   1.170 +  /* Called during decoding, to decide whether a (sub)stream should be decoded or
   1.171 +   * ignored */
   1.172 +  static bool ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps);
   1.173 +
   1.174 +  /* Called by decodebin during autoplugging. We use it to apply our
   1.175 +   * container/codec whitelist.
   1.176 +   */
   1.177 +  static GValueArray* AutoplugSortCb(GstElement* aElement,
   1.178 +                                     GstPad* aPad, GstCaps* aCaps,
   1.179 +                                     GValueArray* aFactories);
   1.180 +
   1.181 +  // Try to find MP3 headers in this stream using our MP3 frame parser.
   1.182 +  nsresult ParseMP3Headers();
   1.183 +
   1.184 +  // Get the length of the stream, excluding any metadata we have ignored at the
   1.185 +  // start of the stream: ID3 headers, for example.
   1.186 +  int64_t GetDataLength();
   1.187 +
   1.188 +  // Use our own MP3 parser here, largely for consistency with other platforms.
   1.189 +  MP3FrameParser mMP3FrameParser;
   1.190 +
   1.191 +  // The byte position in the stream where the actual media (ignoring, for
   1.192 +  // example, ID3 tags) starts.
   1.193 +  uint64_t mDataOffset;
   1.194 +
   1.195 +  // We want to be able to decide in |ReadMetadata| whether or not we use the
   1.196 +  // duration from the MP3 frame parser, as this backend supports more than just
   1.197 +  // MP3. But |NotifyDataArrived| can update the duration and is often called
   1.198 +  // _before_ |ReadMetadata|. This flag stops the former from using the parser
   1.199 +  // duration until we are sure we want to.
   1.200 +  bool mUseParserDuration;
   1.201 +  int64_t mLastParserDuration;
   1.202 +
   1.203 +#if GST_VERSION_MAJOR >= 1
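         +  /* Allocator and buffer pool used to hand image-backed buffers to the
         +   * pipeline, and the negotiated video format */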
   1.204 +  GstAllocator *mAllocator;
   1.205 +  GstBufferPool *mBufferPool;
   1.206 +  GstVideoInfo mVideoInfo;
   1.207 +#endif
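         +  /* The playbin element driving the pipeline, its bus and the appsrc element
         +   * feeding it data read from the media resource */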
   1.208 +  GstElement* mPlayBin;
   1.209 +  GstBus* mBus;
   1.210 +  GstAppSrc* mSource;
   1.211 +  /* video sink bin */
   1.212 +  GstElement* mVideoSink;
   1.213 +  /* the actual video app sink */
   1.214 +  GstAppSink* mVideoAppSink;
   1.215 +  /* audio sink bin */
   1.216 +  GstElement* mAudioSink;
   1.217 +  /* the actual audio app sink */
   1.218 +  GstAppSink* mAudioAppSink;
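         +  /* Video format and picture rectangle of the decoded frames */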
   1.219 +  GstVideoFormat mFormat;
   1.220 +  IntRect mPicture;
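         +  /* Number of buffers currently queued in the video/audio app sinks; guarded
         +   * by mGstThreadsMonitor */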
   1.221 +  int mVideoSinkBufferCount;
   1.222 +  int mAudioSinkBufferCount;
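         +  /* Callback tables registered with the appsrc and the app sinks */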
   1.223 +  GstAppSrcCallbacks mSrcCallbacks;
   1.224 +  GstAppSinkCallbacks mSinkCallbacks;
   1.225 +  /* Monitor used to synchronize access to state shared between GStreamer
   1.226 +   * threads and other Gecko threads */
   1.227 +  ReentrantMonitor mGstThreadsMonitor;
   1.228 +  /* Video and audio segments used to convert absolute timestamps to
   1.229 +   * [0, stream_duration]. They're set when the pipeline is started or after a
   1.230 +   * seek. Concurrent access is guarded by mGstThreadsMonitor.
   1.231 +   */
   1.232 +  GstSegment mVideoSegment;
   1.233 +  GstSegment mAudioSegment;
   1.234 +  /* Flags used to signal that GStreamer has detected the end of stream and
   1.235 +   * DecodeAudioData and DecodeVideoFrame should not expect any more data.
   1.236 +   */
   1.237 +  bool mReachedAudioEos;
   1.238 +  bool mReachedVideoEos;
   1.239 +#if GST_VERSION_MAJOR >= 1
   1.240 +  bool mConfigureAlignment;
   1.241 +#endif
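         +  /* Video framerate expressed as a fraction (fpsNum/fpsDen) */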
   1.242 +  int fpsNum;
   1.243 +  int fpsDen;
   1.244 +};
   1.245 +
   1.246 +} // namespace mozilla
   1.247 +
   1.248 +#endif
