content/media/gstreamer/GStreamerReader.cpp

Tue, 06 Jan 2015 21:39:09 +0100

author
Michael Schloh von Bennewitz <michael@schloh.com>
date
Tue, 06 Jan 2015 21:39:09 +0100
branch
TOR_BUG_9701
changeset 8
97036ab72558
permissions
-rw-r--r--

Conditionally force in-memory storage according to the privacy.thirdparty.isolate
preference; this solves Tor bug #9701 by complying with the disk-avoidance
requirement documented at
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.

     1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
     2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
     3 /* This Source Code Form is subject to the terms of the Mozilla Public
     4  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     5  * You can obtain one at http://mozilla.org/MPL/2.0/. */
     7 #include "nsError.h"
     8 #include "nsMimeTypes.h"
     9 #include "MediaDecoderStateMachine.h"
    10 #include "AbstractMediaDecoder.h"
    11 #include "MediaResource.h"
    12 #include "GStreamerReader.h"
    13 #if GST_VERSION_MAJOR >= 1
    14 #include "GStreamerAllocator.h"
    15 #endif
    16 #include "GStreamerFormatHelper.h"
    17 #include "VideoUtils.h"
    18 #include "mozilla/dom/TimeRanges.h"
    19 #include "mozilla/Endian.h"
    20 #include "mozilla/Preferences.h"
    21 #include "mozilla/unused.h"
    22 #include "GStreamerLoader.h"
    23 #include "gfx2DGlue.h"
namespace mozilla {

using namespace gfx;
using namespace layers;

// Un-comment to enable logging of seek bisections.
//#define SEEK_LOGGING

#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
// Prefix every log line with the reader instance pointer so interleaved
// readers can be told apart in the media log.
#define LOG(type, msg, ...) \
  PR_LOG(gMediaDecoderLog, type, ("GStreamerReader(%p) " msg, this, ##__VA_ARGS__))
#else
#define LOG(type, msg, ...)
#endif

#if DEBUG
// Channel-count bound referenced only by debug-build assertions.
static const unsigned int MAX_CHANNELS = 4;
#endif
// Let the demuxer work in pull mode for short files. This used to be a micro
// optimization to have more accurate durations for ogg files in mochitests.
// Since as of today we aren't using gstreamer to demux ogg, and having demuxers
// work in pull mode over http makes them slower (since they really assume
// near-zero latency in pull mode) set the constant to 0 for now, which
// effectively disables it.
static const int SHORT_FILE_SIZE = 0;
// The default resource->Read() size when working in push mode
static const int DEFAULT_SOURCE_READ_SIZE = 50 * 1024;

// Local mirror of playbin/playbin2's GstPlayFlags: bitmask selecting which
// stream types (video, audio, text, ...) the pipeline should decode.
typedef enum {
  GST_PLAY_FLAG_VIDEO         = (1 << 0),
  GST_PLAY_FLAG_AUDIO         = (1 << 1),
  GST_PLAY_FLAG_TEXT          = (1 << 2),
  GST_PLAY_FLAG_VIS           = (1 << 3),
  GST_PLAY_FLAG_SOFT_VOLUME   = (1 << 4),
  GST_PLAY_FLAG_NATIVE_AUDIO  = (1 << 5),
  GST_PLAY_FLAG_NATIVE_VIDEO  = (1 << 6),
  GST_PLAY_FLAG_DOWNLOAD      = (1 << 7),
  GST_PLAY_FLAG_BUFFERING     = (1 << 8),
  GST_PLAY_FLAG_DEINTERLACE   = (1 << 9),
  GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10)
} PlayFlags;
// Constructor: zero-initializes all pipeline/appsink state and wires the
// static appsrc/appsink callback tables to this instance. The actual
// GStreamer pipeline is built later in Init().
GStreamerReader::GStreamerReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder),
  mMP3FrameParser(aDecoder->GetResource()->GetLength()),
  mDataOffset(0),
  mUseParserDuration(false),
#if GST_VERSION_MAJOR >= 1
  mAllocator(nullptr),
  mBufferPool(nullptr),
#endif
  mPlayBin(nullptr),
  mBus(nullptr),
  mSource(nullptr),
  mVideoSink(nullptr),
  mVideoAppSink(nullptr),
  mAudioSink(nullptr),
  mAudioAppSink(nullptr),
  mFormat(GST_VIDEO_FORMAT_UNKNOWN),
  mVideoSinkBufferCount(0),
  mAudioSinkBufferCount(0),
  mGstThreadsMonitor("media.gst.threads"),
  mReachedAudioEos(false),
  mReachedVideoEos(false),
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment(true),
#endif
  fpsNum(0),
  fpsDen(0)
{
  MOZ_COUNT_CTOR(GStreamerReader);

  // Static trampolines invoked by the appsrc when it wants (or has enough)
  // data, or when the demuxer requests a seek.
  mSrcCallbacks.need_data = GStreamerReader::NeedDataCb;
  mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb;
  mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb;

  // Appsink callbacks; the "new sample/buffer" entry point differs by name
  // between GStreamer 0.10 and 1.x.
  mSinkCallbacks.eos = GStreamerReader::EosCb;
  mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
#if GST_VERSION_MAJOR >= 1
  mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb;
#else
  mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb;
  mSinkCallbacks.new_buffer_list = nullptr;
#endif

  gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED);
  gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED);
}
   115 GStreamerReader::~GStreamerReader()
   116 {
   117   MOZ_COUNT_DTOR(GStreamerReader);
   118   ResetDecode();
   120   if (mPlayBin) {
   121     gst_app_src_end_of_stream(mSource);
   122     if (mSource)
   123       gst_object_unref(mSource);
   124     gst_element_set_state(mPlayBin, GST_STATE_NULL);
   125     gst_object_unref(mPlayBin);
   126     mPlayBin = nullptr;
   127     mVideoSink = nullptr;
   128     mVideoAppSink = nullptr;
   129     mAudioSink = nullptr;
   130     mAudioAppSink = nullptr;
   131     gst_object_unref(mBus);
   132     mBus = nullptr;
   133 #if GST_VERSION_MAJOR >= 1
   134     g_object_unref(mAllocator);
   135     g_object_unref(mBufferPool);
   136 #endif
   137   }
   138 }
// Builds the decoding pipeline: a playbin fed by an appsrc ("appsrc://"),
// with capsfilter+appsink bins substituted for the normal video/audio
// sinks so decoded buffers are pulled by the reader instead of rendered.
// Returns NS_ERROR_FAILURE when the playbin element cannot be created.
nsresult GStreamerReader::Init(MediaDecoderReader* aCloneDonor)
{
  // Ensure the format helper (and therefore GStreamer itself) is initialized.
  GStreamerFormatHelper::Instance();

#if GST_VERSION_MAJOR >= 1
  // Custom allocator/buffer pool so decoded frames land in gfx-backed
  // memory owned by this reader.
  mAllocator = static_cast<GstAllocator*>(g_object_new(GST_TYPE_MOZ_GFX_MEMORY_ALLOCATOR, nullptr));
  moz_gfx_memory_allocator_set_reader(mAllocator, this);

  mBufferPool = static_cast<GstBufferPool*>(g_object_new(GST_TYPE_MOZ_GFX_BUFFER_POOL, nullptr));
#endif

#if GST_VERSION_MAJOR >= 1
  mPlayBin = gst_element_factory_make("playbin", nullptr);
#else
  mPlayBin = gst_element_factory_make("playbin2", nullptr);
#endif
  if (!mPlayBin) {
    LOG(PR_LOG_ERROR, "couldn't create playbin");
    return NS_ERROR_FAILURE;
  }
  // Disable playbin's internal buffering; the media cache handles that.
  g_object_set(mPlayBin, "buffer-size", 0, nullptr);
  mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin));

  // Video sink: constrain output to I420 and hold at most one decoded
  // buffer so the decoder blocks until we pull it.
  mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! "
      "appsink name=videosink sync=false max-buffers=1 "
#if GST_VERSION_MAJOR >= 1
      "caps=video/x-raw,format=I420"
#else
      "caps=video/x-raw-yuv,format=(fourcc)I420"
#endif
      , TRUE, nullptr);
  mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink),
        "videosink"));
  // Audio sink: same shape; its caps are computed per-platform below.
  mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! "
        "appsink name=audiosink sync=false max-buffers=1", TRUE, nullptr);
  mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink),
                                                   "audiosink"));
  GstCaps* caps = BuildAudioSinkCaps();
  g_object_set(mAudioAppSink, "caps", caps, nullptr);
  gst_caps_unref(caps);

  gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks,
      (gpointer) this, nullptr);
  gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks,
                             (gpointer) this, nullptr);
  InstallPadCallbacks();

  // Feed the pipeline from our appsrc and install the custom sinks.
  g_object_set(mPlayBin, "uri", "appsrc://",
               "video-sink", mVideoSink,
               "audio-sink", mAudioSink,
               nullptr);

  // "notify::source" fires when playbin instantiates the appsrc, letting
  // PlayBinSourceSetup() configure it.
  g_signal_connect(G_OBJECT(mPlayBin), "notify::source",
                   G_CALLBACK(GStreamerReader::PlayBinSourceSetupCb), this);
  g_signal_connect(G_OBJECT(mPlayBin), "element-added",
                   G_CALLBACK(GStreamerReader::PlayElementAddedCb), this);

  return NS_OK;
}
   200 GstBusSyncReply
   201 GStreamerReader::ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData)
   202 {
   203   return static_cast<GStreamerReader*>(aUserData)->Error(aBus, aMessage);
   204 }
   206 GstBusSyncReply
   207 GStreamerReader::Error(GstBus *aBus, GstMessage *aMessage)
   208 {
   209   if (GST_MESSAGE_TYPE(aMessage) == GST_MESSAGE_ERROR) {
   210     Eos();
   211   }
   213   return GST_BUS_PASS;
   214 }
// "notify::source" signal trampoline: playbin has just created its source
// element (our appsrc); fetch it and let the reader configure it.
void GStreamerReader::PlayBinSourceSetupCb(GstElement* aPlayBin,
                                           GParamSpec* pspec,
                                           gpointer aUserData)
{
  GstElement *source;
  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);

  // g_object_get() returns a reference to the source; ownership is handed
  // to PlayBinSourceSetup(), which stores it in mSource (released in the
  // destructor).
  g_object_get(aPlayBin, "source", &source, nullptr);
  reader->PlayBinSourceSetup(GST_APP_SRC(source));
}
// Configures the appsrc that feeds the pipeline: installs the data
// callbacks, advertises the resource length, picks pull vs push mode, and
// seeds typefind with the resource's MIME type.
void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
{
  mSource = GST_APP_SRC(aSource);
  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, nullptr);
  MediaResource* resource = mDecoder->GetResource();

  /* do a short read to trigger a network request so that GetLength() below
   * returns something meaningful and not -1
   */
  char buf[512];
  unsigned int size = 0;
  resource->Read(buf, sizeof(buf), &size);
  resource->Seek(SEEK_SET, 0);

  /* now we should have a length */
  int64_t resourceLength = GetDataLength();
  gst_app_src_set_size(mSource, resourceLength);
  if (resource->IsDataCachedToEndOfResource(0) ||
      (resourceLength != -1 && resourceLength <= SHORT_FILE_SIZE)) {
    /* let the demuxer work in pull mode for local files (or very short files)
     * so that we get optimal seeking accuracy/performance
     */
    LOG(PR_LOG_DEBUG, "configuring random access, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
  } else {
    /* make the demuxer work in push mode so that seeking is kept to a minimum
     */
    LOG(PR_LOG_DEBUG, "configuring push mode, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
  }

  // Set the source MIME type to stop typefind trying every. single. format.
  GstCaps *caps =
    GStreamerFormatHelper::ConvertFormatsToCaps(mDecoder->GetResource()->GetContentType().get(),
                                                nullptr);

  gst_app_src_set_caps(aSource, caps);
  gst_caps_unref(caps);
}
/**
 * If this stream is an MP3, we want to parse the headers to estimate the
 * stream duration.
 *
 * Reads the resource in 4 KiB chunks from offset 0 until the frame parser
 * has seen enough headers. On success records the parser's duration and
 * the offset of the first audio frame (skipping e.g. ID3 tags), and
 * re-advertises the effective stream size to the appsrc.
 *
 * @return NS_OK on success; a read failure or premature EOF (zero bytes
 *         read) is propagated as an error.
 */
nsresult GStreamerReader::ParseMP3Headers()
{
  MediaResource *resource = mDecoder->GetResource();

  const uint32_t MAX_READ_BYTES = 4096;

  uint64_t offset = 0;
  char bytes[MAX_READ_BYTES];
  uint32_t bytesRead;
  do {
    nsresult rv = resource->ReadAt(offset, bytes, MAX_READ_BYTES, &bytesRead);
    NS_ENSURE_SUCCESS(rv, rv);
    // A zero-byte read means EOF before the headers were parsed; bail out
    // rather than looping forever.
    NS_ENSURE_TRUE(bytesRead, NS_ERROR_FAILURE);

    mMP3FrameParser.Parse(bytes, bytesRead, offset);
    offset += bytesRead;
  } while (!mMP3FrameParser.ParsedHeaders());

  if (mMP3FrameParser.IsMP3()) {
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDataOffset = mMP3FrameParser.GetMP3Offset();

    // Update GStreamer's stream length in case we found any ID3 headers to
    // ignore.
    gst_app_src_set_size(mSource, GetDataLength());
  }

  return NS_OK;
}
   301 int64_t
   302 GStreamerReader::GetDataLength()
   303 {
   304   int64_t streamLen = mDecoder->GetResource()->GetLength();
   306   if (streamLen < 0) {
   307     return streamLen;
   308   }
   310   return streamLen - mDataOffset;
   311 }
// Prerolls the pipeline to discover the media's streams and duration.
// Tries up to three playbin flag combinations (A+V, video-only, audio-only)
// so a stream with one broken substream can still play. On success fills
// *aInfo, sets *aTags to null, installs the bus error handler, and starts
// the pipeline playing so the appsinks begin filling.
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    // Pick the capsfilter of the stream this attempt disables, if any.
    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      // GStreamer reports nanoseconds; the decoder wants microseconds.
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    } else {
      // No queryable duration: treat the media as unseekable.
      mDecoder->SetMediaSeekable(false);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}
   468 nsresult GStreamerReader::CheckSupportedFormats()
   469 {
   470   bool done = false;
   471   bool unsupported = false;
   473   GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
   474   while (!done) {
   475     GstIteratorResult res;
   476     GstElement* element;
   478 #if GST_VERSION_MAJOR >= 1
   479     GValue value = {0,};
   480     res = gst_iterator_next(it, &value);
   481 #else
   482     res = gst_iterator_next(it, (void **) &element);
   483 #endif
   484     switch(res) {
   485       case GST_ITERATOR_OK:
   486       {
   487 #if GST_VERSION_MAJOR >= 1
   488         element = GST_ELEMENT (g_value_get_object (&value));
   489 #endif
   490         GstElementFactory* factory = gst_element_get_factory(element);
   491         if (factory) {
   492           const char* klass = gst_element_factory_get_klass(factory);
   493           GstPad* pad = gst_element_get_static_pad(element, "sink");
   494           if (pad) {
   495             GstCaps* caps;
   497 #if GST_VERSION_MAJOR >= 1
   498             caps = gst_pad_get_current_caps(pad);
   499 #else
   500             caps = gst_pad_get_negotiated_caps(pad);
   501 #endif
   503             if (caps) {
   504               /* check for demuxers but ignore elements like id3demux */
   505               if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
   506                 unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
   507               else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
   508                 unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);
   510               gst_caps_unref(caps);
   511             }
   512             gst_object_unref(pad);
   513           }
   514         }
   516 #if GST_VERSION_MAJOR >= 1
   517         g_value_unset (&value);
   518 #else
   519         gst_object_unref(element);
   520 #endif
   521         done = unsupported;
   522         break;
   523       }
   524       case GST_ITERATOR_RESYNC:
   525         unsupported = false;
   526         done = false;
   527         break;
   528       case GST_ITERATOR_ERROR:
   529         done = true;
   530         break;
   531       case GST_ITERATOR_DONE:
   532         done = true;
   533         break;
   534     }
   535   }
   537   return unsupported ? NS_ERROR_FAILURE : NS_OK;
   538 }
   540 nsresult GStreamerReader::ResetDecode()
   541 {
   542   nsresult res = NS_OK;
   544   LOG(PR_LOG_DEBUG, "reset decode");
   546   if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
   547     res = NS_ERROR_FAILURE;
   548   }
   550   mVideoQueue.Reset();
   551   mAudioQueue.Reset();
   553   mVideoSinkBufferCount = 0;
   554   mAudioSinkBufferCount = 0;
   555   mReachedAudioEos = false;
   556   mReachedVideoEos = false;
   557 #if GST_VERSION_MAJOR >= 1
   558   mConfigureAlignment = true;
   559 #endif
   561   LOG(PR_LOG_DEBUG, "reset decode done");
   563   return res;
   564 }
// Pulls one decoded audio buffer from the audio appsink, converts its
// timestamp to stream-time microseconds, and pushes the samples through
// the audio compactor into the audio queue.
// Returns false only when audio has reached EOS with nothing queued;
// returns true otherwise (including "no audio yet, let the state machine
// process whatever woke us").
bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    // The appsink callbacks update the buffer counts and notify this
    // monitor from GStreamer threads.
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    // 1.x wraps buffers in GstSample; take our own ref on the buffer so it
    // survives releasing the sample.
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  // Convert the buffer's running-time timestamp into stream time, then
  // into microseconds for the decoder.
  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);

  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  // 1.x requires mapping the buffer to access its bytes.
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
// Pulls one decoded video buffer from the video appsink, applies keyframe
// skipping and the time threshold, wraps the frame in a VideoData and
// pushes it on the video queue.
// Returns false only when video has reached EOS with nothing queued;
// returns true otherwise (including frames skipped or dropped).
bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    // Appsink callbacks update the counters and notify this monitor from
    // GStreamer threads.
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    // 1.x wraps buffers in GstSample; keep our own ref on the buffer.
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  // Delta-unit flag absent means this is a keyframe.
  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  // Convert the buffer timestamp to stream time (segment access needs the
  // monitor), then to microseconds.
  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  // Drop frames that are already behind the playback threshold.
  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  // NOTE(review): this null check comes after buffer has already been
  // dereferenced via the GST_BUFFER_* macros above — confirm whether the
  // pull above can actually yield null; if so the check should move up.
  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  // First frame from a pool: pick up the pool's video alignment so plane
  // strides/offsets in mVideoInfo match the decoder's layout.
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
// Seeks the pipeline to aTarget (microseconds) with a flushing, keyframe-
// snapped seek, then blocks until the pipeline reports the seek finished.
// Returns NS_ERROR_FAILURE if the seek could not even be started.
nsresult GStreamerReader::Seek(int64_t aTarget,
                                 int64_t aStartTime,
                                 int64_t aEndTime,
                                 int64_t aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  // Convert microseconds to GStreamer nanoseconds.
  gint64 seekPos = aTarget * GST_USECOND;
  LOG(PR_LOG_DEBUG, "%p About to seek to %" GST_TIME_FORMAT,
        mDecoder, GST_TIME_ARGS(seekPos));

  // FLUSH discards in-flight data; KEY_UNIT snaps to the nearest keyframe.
  int flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT;
  if (!gst_element_seek_simple(mPlayBin,
                               GST_FORMAT_TIME,
                               static_cast<GstSeekFlags>(flags),
                               seekPos)) {
    LOG(PR_LOG_ERROR, "seek failed");
    return NS_ERROR_FAILURE;
  }
  LOG(PR_LOG_DEBUG, "seek succeeded");
  // Block until ASYNC_DONE (seek completed) or ERROR is posted.
  // NOTE(review): an ERROR message is unreffed and NS_OK is still returned
  // here — confirm whether a seek-time pipeline error should be propagated.
  GstMessage* message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
               (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR));
  gst_message_unref(message);
  LOG(PR_LOG_DEBUG, "seek completed");

  return NS_OK;
}
// Reports the time ranges currently buffered by the media cache. Cached
// byte ranges are translated into time via the pipeline's BYTES->TIME
// conversion query; ranges the pipeline cannot convert are skipped. Fully
// cached resources short-circuit to [0, duration].
nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered,
                                      int64_t aStartTime)
{
  // Nothing sensible to report before metadata has been read.
  if (!mInfo.HasValidMedia()) {
    return NS_OK;
  }

#if GST_VERSION_MAJOR == 0
  GstFormat format = GST_FORMAT_TIME;
#endif
  MediaResource* resource = mDecoder->GetResource();
  nsTArray<MediaByteRange> ranges;
  resource->GetCachedRanges(ranges);

  if (resource->IsDataCachedToEndOfResource(0)) {
    /* fast path for local or completely cached files */
    gint64 duration = 0;

    {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      duration = mDecoder->GetMediaDuration();
    }

    // Duration is in microseconds; dividing by GST_MSECOND (1e6 ns) yields
    // seconds for the TimeRanges API.
    double end = (double) duration / GST_MSECOND;
    LOG(PR_LOG_DEBUG, "complete range [0, %f] for [0, %li]",
          end, GetDataLength());
    aBuffered->Add(0, end);
    return NS_OK;
  }

  for(uint32_t index = 0; index < ranges.Length(); index++) {
    int64_t startOffset = ranges[index].mStart;
    int64_t endOffset = ranges[index].mEnd;
    gint64 startTime, endTime;

    // Ask the pipeline to convert byte offsets to stream time; skip the
    // range if either endpoint cannot be converted.
#if GST_VERSION_MAJOR >= 1
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      startOffset, GST_FORMAT_TIME, &startTime))
      continue;
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      endOffset, GST_FORMAT_TIME, &endTime))
      continue;
#else
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      startOffset, &format, &startTime) || format != GST_FORMAT_TIME)
      continue;
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      endOffset, &format, &endTime) || format != GST_FORMAT_TIME)
      continue;
#endif

    double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
    double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
    LOG(PR_LOG_DEBUG, "adding range [%f, %f] for [%li %li] size %li",
          start, end, startOffset, endOffset, GetDataLength());
    aBuffered->Add(start, end);
  }

  return NS_OK;
}
/**
 * Pull up to aLength bytes from the media resource and push them into the
 * appsrc feeding the pipeline. On read error or short read, signal
 * end-of-stream to the appsrc.
 */
void GStreamerReader::ReadAndPushData(guint aLength)
{
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  int64_t offset1 = resource->Tell();
  unused << offset1; // only consumed by the debug-only MOZ_ASSERT below
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  // 1.x: buffer memory must be explicitly mapped before writing into it.
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  // Loop: MediaResource::Read may return fewer bytes than requested.
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  int64_t offset2 = resource->Tell();
  unused << offset2;

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  // Shrink the buffer to what was actually read.
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  // gst_app_src_push_buffer takes ownership of the ref passed in; keep our
  // own ref so `buffer` remains valid until the unref below.
  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(PR_LOG_ERROR, "ReadAndPushData push ret %s(%d)", gst_flow_get_name(ret), ret);
  }

  if (NS_FAILED(rv)) {
    /* Terminate the stream if there is an error in reading */
    LOG(PR_LOG_ERROR, "ReadAndPushData read error, rv=%x", rv);
    gst_app_src_end_of_stream(mSource);
  } else if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    LOG(PR_LOG_WARNING, "ReadAndPushData read underflow, "
        "bytesRead=%u, aLength=%u, offset(%lld,%lld)",
        bytesRead, aLength, offset1, offset2);
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);

  /* Ensure offset change is consistent in this function.
   * If there are other stream operations on another thread at the same time,
   * it will disturb the GStreamer state machine.
   */
  MOZ_ASSERT(offset1 + bytesRead == offset2);
}
   919 void GStreamerReader::NeedDataCb(GstAppSrc* aSrc,
   920                                  guint aLength,
   921                                  gpointer aUserData)
   922 {
   923   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   924   reader->NeedData(aSrc, aLength);
   925 }
   927 void GStreamerReader::NeedData(GstAppSrc* aSrc, guint aLength)
   928 {
   929   if (aLength == static_cast<guint>(-1))
   930     aLength = DEFAULT_SOURCE_READ_SIZE;
   931   ReadAndPushData(aLength);
   932 }
   934 void GStreamerReader::EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData)
   935 {
   936   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   937   reader->EnoughData(aSrc);
   938 }
// Intentionally empty: appsrc reports that its internal queue is full, but we
// only push data in response to NeedData, so there is nothing to throttle.
void GStreamerReader::EnoughData(GstAppSrc* aSrc)
{
}
   944 gboolean GStreamerReader::SeekDataCb(GstAppSrc* aSrc,
   945                                      guint64 aOffset,
   946                                      gpointer aUserData)
   947 {
   948   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   949   return reader->SeekData(aSrc, aOffset);
   950 }
   952 gboolean GStreamerReader::SeekData(GstAppSrc* aSrc, guint64 aOffset)
   953 {
   954   aOffset += mDataOffset;
   956   ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   957   MediaResource* resource = mDecoder->GetResource();
   958   int64_t resourceLength = resource->GetLength();
   960   if (gst_app_src_get_size(mSource) == -1) {
   961     /* It's possible that we didn't know the length when we initialized mSource
   962      * but maybe we do now
   963      */
   964     gst_app_src_set_size(mSource, GetDataLength());
   965   }
   967   nsresult rv = NS_ERROR_FAILURE;
   968   if (aOffset < static_cast<guint64>(resourceLength)) {
   969     rv = resource->Seek(SEEK_SET, aOffset);
   970   }
   972   if (NS_FAILED(rv)) {
   973     LOG(PR_LOG_ERROR, "seek at %lu failed", aOffset);
   974   } else {
   975     MOZ_ASSERT(aOffset == static_cast<guint64>(resource->Tell()));
   976   }
   978   return NS_SUCCEEDED(rv);
   979 }
   981 GstFlowReturn GStreamerReader::NewPrerollCb(GstAppSink* aSink,
   982                                               gpointer aUserData)
   983 {
   984   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   986   if (aSink == reader->mVideoAppSink)
   987     reader->VideoPreroll();
   988   else
   989     reader->AudioPreroll();
   990   return GST_FLOW_OK;
   991 }
void GStreamerReader::AudioPreroll()
{
  /* The first audio buffer has reached the audio sink. Get rate and channels */
  LOG(PR_LOG_DEBUG, "Audio preroll");
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
#if GST_VERSION_MAJOR >= 1
  GstCaps *caps = gst_pad_get_current_caps(sinkpad);
#else
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
#endif
  // Read the negotiated format out of the caps' first structure; fields that
  // are absent leave the zero defaults in place.
  GstStructure* s = gst_caps_get_structure(caps, 0);
  mInfo.mAudio.mRate = mInfo.mAudio.mChannels = 0;
  gst_structure_get_int(s, "rate", (gint*) &mInfo.mAudio.mRate);
  gst_structure_get_int(s, "channels", (gint*) &mInfo.mAudio.mChannels);
  // Debug-only sanity checks; MAX_CHANNELS is only defined #if DEBUG (see
  // top of file), matching NS_ASSERTION compiling away in release builds.
  NS_ASSERTION(mInfo.mAudio.mRate != 0, ("audio rate is zero"));
  NS_ASSERTION(mInfo.mAudio.mChannels != 0, ("audio channels is zero"));
  NS_ASSERTION(mInfo.mAudio.mChannels > 0 && mInfo.mAudio.mChannels <= MAX_CHANNELS,
      "invalid audio channels number");
  mInfo.mAudio.mHasAudio = true;
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
void GStreamerReader::VideoPreroll()
{
  /* The first video buffer has reached the video sink. Get width and height */
  LOG(PR_LOG_DEBUG, "Video preroll");
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  int PARNumerator, PARDenominator;
#if GST_VERSION_MAJOR >= 1
  GstCaps* caps = gst_pad_get_current_caps(sinkpad);
  memset (&mVideoInfo, 0, sizeof (mVideoInfo));
  gst_video_info_from_caps(&mVideoInfo, caps);
  mFormat = mVideoInfo.finfo->format;
  mPicture.width = mVideoInfo.width;
  mPicture.height = mVideoInfo.height;
  PARNumerator = GST_VIDEO_INFO_PAR_N(&mVideoInfo);
  PARDenominator = GST_VIDEO_INFO_PAR_D(&mVideoInfo);
#else
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
  if (!gst_video_parse_caps_pixel_aspect_ratio(caps, &PARNumerator, &PARDenominator)) {
    // Caps carry no pixel-aspect-ratio: assume square pixels.
    PARNumerator = 1;
    PARDenominator = 1;
  }
#endif
  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");

  // Calculate display size according to pixel aspect ratio.
  nsIntRect pictureRect(0, 0, mPicture.width, mPicture.height);
  nsIntSize frameSize = nsIntSize(mPicture.width, mPicture.height);
  nsIntSize displaySize = nsIntSize(mPicture.width, mPicture.height);
  ScaleDisplayByAspectRatio(displaySize, float(PARNumerator) / float(PARDenominator));

  // If video frame size is overflow, stop playing.
  if (IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    // NOTE(review): fpsNum/fpsDen are not declared in this function —
    // presumably member variables declared in the class header; confirm.
    gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
    mInfo.mVideo.mDisplay = ThebesIntSize(displaySize.ToIntSize());
    mInfo.mVideo.mHasVideo = true;
  } else {
    LOG(PR_LOG_DEBUG, "invalid video region");
    Eos();
  }
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
  1061 GstFlowReturn GStreamerReader::NewBufferCb(GstAppSink* aSink,
  1062                                            gpointer aUserData)
  1064   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  1066   if (aSink == reader->mVideoAppSink)
  1067     reader->NewVideoBuffer();
  1068   else
  1069     reader->NewAudioBuffer();
  1071   return GST_FLOW_OK;
void GStreamerReader::NewVideoBuffer()
{
  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  /* We have a new video buffer queued in the video sink. Increment the counter
   * and notify the decode thread potentially blocked in DecodeVideoFrame
   */

  mDecoder->NotifyDecodedFrames(1, 0);
  mVideoSinkBufferCount++;
  mon.NotifyAll();
}
void GStreamerReader::NewAudioBuffer()
{
  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  /* We have a new audio buffer queued in the audio sink. Increment the counter
   * and notify the decode thread potentially blocked in DecodeAudioData
   */
  mAudioSinkBufferCount++;
  mon.NotifyAll();
}
  1096 void GStreamerReader::EosCb(GstAppSink* aSink, gpointer aUserData)
  1098   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  1099   reader->Eos(aSink);
void GStreamerReader::Eos(GstAppSink* aSink)
{
  /* We reached the end of the stream */
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    /* Potentially unblock DecodeVideoFrame and DecodeAudioData */
    if (aSink == mVideoAppSink) {
      mReachedVideoEos = true;
    } else if (aSink == mAudioAppSink) {
      mReachedAudioEos = true;
    } else {
      // Assume this is an error causing an EOS.
      mReachedAudioEos = true;
      mReachedVideoEos = true;
    }
    mon.NotifyAll();
  }

  // Separate scope: the decoder monitor must not be held together with
  // mGstThreadsMonitor longer than necessary.
  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    /* Potentially unblock the decode thread in ::DecodeLoop */
    mon.NotifyAll();
  }
}
  1127 /**
  1128  * This callback is called while the pipeline is automatically built, after a
  1129  * new element has been added to the pipeline. We use it to find the
  1130  * uridecodebin instance used by playbin and connect to it to apply our
  1131  * whitelist.
  1132  */
  1133 void
  1134 GStreamerReader::PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
  1135                                     gpointer *aUserData)
  1137   const static char sUriDecodeBinPrefix[] = "uridecodebin";
  1138   gchar *name = gst_element_get_name(aElement);
  1140   // Attach this callback to uridecodebin, child of playbin.
  1141   if (!strncmp(name, sUriDecodeBinPrefix, sizeof(sUriDecodeBinPrefix) - 1)) {
  1142     g_signal_connect(G_OBJECT(aElement), "autoplug-sort",
  1143                      G_CALLBACK(GStreamerReader::AutoplugSortCb), aUserData);
  1146   g_free(name);
  1149 bool
  1150 GStreamerReader::ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps)
  1152   bool autoplug;
  1153   const gchar *klass = gst_element_factory_get_klass(aFactory);
  1154   if (strstr(klass, "Demuxer") && !strstr(klass, "Metadata")) {
  1155     autoplug = GStreamerFormatHelper::Instance()->CanHandleContainerCaps(aCaps);
  1156   } else if (strstr(klass, "Decoder") && !strstr(klass, "Generic")) {
  1157     autoplug = GStreamerFormatHelper::Instance()->CanHandleCodecCaps(aCaps);
  1158   } else {
  1159     /* we only filter demuxers and decoders, let everything else be autoplugged */
  1160     autoplug = true;
  1163   return autoplug;
  1166 /**
  1167  * This is called by uridecodebin (running inside playbin), after it has found
  1168  * candidate factories to continue decoding the stream. We apply the whitelist
  1169  * here, allowing only demuxers and decoders that output the formats we want to
  1170  * support.
  1171  */
  1172 GValueArray*
  1173 GStreamerReader::AutoplugSortCb(GstElement* aElement, GstPad* aPad,
  1174                                 GstCaps* aCaps, GValueArray* aFactories)
  1176   if (!aFactories->n_values) {
  1177     return nullptr;
  1180   /* aFactories[0] is the element factory that is going to be used to
  1181    * create the next element needed to demux or decode the stream.
  1182    */
  1183   GstElementFactory *factory = (GstElementFactory*) g_value_get_object(g_value_array_get_nth(aFactories, 0));
  1184   if (!ShouldAutoplugFactory(factory, aCaps)) {
  1185     /* We don't support this factory. Return an empty array to signal that we
  1186      * don't want to continue decoding this (sub)stream.
  1187      */
  1188     return g_value_array_new(0);
  1191   /* nullptr means that we're ok with the candidates and don't need to apply any
  1192    * sorting/filtering.
  1193    */
  1194   return nullptr;
  1197 /**
  1198  * If this is an MP3 stream, pass any new data we get to the MP3 frame parser
  1199  * for duration estimation.
  1200  */
  1201 void GStreamerReader::NotifyDataArrived(const char *aBuffer,
  1202                                         uint32_t aLength,
  1203                                         int64_t aOffset)
  1205   MOZ_ASSERT(NS_IsMainThread());
  1207   if (HasVideo()) {
  1208     return;
  1211   if (!mMP3FrameParser.NeedsData()) {
  1212     return;
  1215   mMP3FrameParser.Parse(aBuffer, aLength, aOffset);
  1217   int64_t duration = mMP3FrameParser.GetDuration();
  1218   if (duration != mLastParserDuration && mUseParserDuration) {
  1219     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  1220     mLastParserDuration = duration;
  1221     mDecoder->UpdateEstimatedMediaDuration(mLastParserDuration);
  1225 #if GST_VERSION_MAJOR >= 1
/**
 * Build the caps restricting the audio app sink to mono/stereo raw audio in
 * the sample format this build of Gecko uses (float32 or signed 16-bit, in
 * native endianness). Caller owns the returned caps.
 */
GstCaps* GStreamerReader::BuildAudioSinkCaps()
{
  GstCaps* caps = gst_caps_from_string("audio/x-raw, channels={1,2}");
  const char* format;
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
#if MOZ_LITTLE_ENDIAN
  format = "F32LE";
#else
  format = "F32BE";
#endif
#else /* !MOZ_SAMPLE_TYPE_FLOAT32 */
#if MOZ_LITTLE_ENDIAN
  format = "S16LE";
#else
  format = "S16BE";
#endif
#endif
  gst_caps_set_simple(caps, "format", G_TYPE_STRING, format, nullptr);

  return caps;
}
/**
 * Install event probes on both app sinks' sink pads, plus (video only) a
 * downstream-query probe used for allocation queries.
 */
void GStreamerReader::InstallPadCallbacks()
{
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");

  gst_pad_add_probe(sinkpad,
      (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING |
        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_UPSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_FLUSH),
      &GStreamerReader::EventProbeCb, this, nullptr);
  // Query probe gets nullptr user data: QueryProbeCb recovers `this` from
  // the pad's element-private data, which is set just below.
  gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM,
      GStreamerReader::QueryProbeCb, nullptr, nullptr);

  gst_pad_set_element_private(sinkpad, this);
  gst_object_unref(sinkpad);

  // The audio pad only gets the event probe — no query probe, no
  // element-private data.
  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  gst_pad_add_probe(sinkpad,
      (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING |
        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_UPSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_FLUSH),
      &GStreamerReader::EventProbeCb, this, nullptr);
  gst_object_unref(sinkpad);
}
  1274 GstPadProbeReturn GStreamerReader::EventProbeCb(GstPad *aPad,
  1275                                                 GstPadProbeInfo *aInfo,
  1276                                                 gpointer aUserData)
  1278   GStreamerReader *reader = (GStreamerReader *) aUserData;
  1279   GstEvent *aEvent = (GstEvent *)aInfo->data;
  1280   return reader->EventProbe(aPad, aEvent);
/**
 * Handle events seen on the app sinks' sink pads: record segments for
 * timestamp conversion, and reset decoding state on flush (seek).
 */
GstPadProbeReturn GStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent)
{
  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));

  LOG(PR_LOG_DEBUG, "event probe %s", GST_EVENT_TYPE_NAME (aEvent));

  switch(GST_EVENT_TYPE(aEvent)) {
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *newSegment;
      GstSegment* segment;

      /* Store the segments so we can convert timestamps to stream time, which
       * is what the upper layers sync on.
       */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
#if GST_VERSION_MINOR <= 1 && GST_VERSION_MICRO < 1
      // Workaround for older GStreamer 1.x releases; newer ones reset via
      // FLUSH_STOP below.
      ResetDecode();
#endif
      gst_event_parse_segment(aEvent, &newSegment);
      // Pick the per-stream segment based on which sink the pad belongs to.
      if (parent == GST_ELEMENT(mVideoAppSink))
        segment = &mVideoSegment;
      else
        segment = &mAudioSegment;
      gst_segment_copy_into (newSegment, segment);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Reset on seeks */
      ResetDecode();
      break;
    default:
      break;
  }
  gst_object_unref(parent);

  return GST_PAD_PROBE_OK;
}
  1322 GstPadProbeReturn GStreamerReader::QueryProbeCb(GstPad* aPad, GstPadProbeInfo* aInfo, gpointer aUserData)
  1324   GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad));
  1325   return reader->QueryProbe(aPad, aInfo, aUserData);
/**
 * Answer ALLOCATION queries on the video sink pad by offering our custom
 * allocator and buffer pool, enabling zero-copy decode into gfx images.
 */
GstPadProbeReturn GStreamerReader::QueryProbe(GstPad* aPad, GstPadProbeInfo* aInfo, gpointer aUserData)
{
  GstQuery *query = gst_pad_probe_info_get_query(aInfo);
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_ALLOCATION:
      GstCaps *caps;
      GstVideoInfo info;
      gboolean need_pool;

      gst_query_parse_allocation(query, &caps, &need_pool);
      gst_video_info_init(&info);
      gst_video_info_from_caps(&info, caps);
      // Advertise our allocator and a pool sized for one frame of `caps`.
      gst_query_add_allocation_param(query, mAllocator, nullptr);
      gst_query_add_allocation_pool(query, mBufferPool, info.size, 0, 0);
      break;
    default:
      break;
  }

  return ret;
}
/**
 * Fill a PlanarYCbCrImage::Data descriptor from a mapped GstVideoFrame.
 * Only 3-component YUV frames are supported (asserted below).
 */
void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame,
                                              PlanarYCbCrImage::Data *aData)
{
  NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo),
               "Non-YUV video frame formats not supported");
  NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3,
               "Unsupported number of components in video frame");

  aData->mPicX = aData->mPicY = 0;
  aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  aData->mStereoMode = StereoMode::MONO;

  // Component 0 is luma (Y). "Skip" is the pixel stride minus one: the gap
  // between consecutive samples of this component.
  aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0);
  aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0);
  aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0),
                          GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0));
  aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1;
  // Components 1 and 2 are chroma (Cb, Cr); stride/size taken from Cb.
  aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1);
  aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1),
                             GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1));
  aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1);
  aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2);
  aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1;
  aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1;
}
/**
 * If aBuffer's (single) memory chunk was allocated by our gfx allocator, it
 * already wraps a PlanarYCbCrImage: attach the frame's plane pointers to
 * that image without copying and return it. Otherwise return nullptr.
 */
nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
{
  nsRefPtr<PlanarYCbCrImage> image = nullptr;

  if (gst_buffer_n_memory(aBuffer) == 1) {
    GstMemory* mem = gst_buffer_peek_memory(aBuffer, 0);
    if (GST_IS_MOZ_GFX_MEMORY_ALLOCATOR(mem->allocator)) {
      image = moz_gfx_memory_get_image(mem);

      GstVideoFrame frame;
      gst_video_frame_map(&frame, &mVideoInfo, aBuffer, GST_MAP_READ);
      PlanarYCbCrImage::Data data;
      ImageDataFromVideoFrame(&frame, &data);
      // SetDataNoCopy: the image aliases the buffer's memory rather than
      // copying the planes.
      image->SetDataNoCopy(data);
      gst_video_frame_unmap(&frame);
    }
  }

  return image;
}
/**
 * Copy aBuffer's contents into a new buffer backed by our gfx allocator so
 * GetImageFromBuffer can produce a zero-copy image from it. On return,
 * *aOutBuffer holds the new buffer (caller owns the ref) and `image` the
 * resulting image.
 */
void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage> &image)
{
  *aOutBuffer = gst_buffer_new_allocate(mAllocator, gst_buffer_get_size(aBuffer), nullptr);
  GstMemory *mem = gst_buffer_peek_memory(*aOutBuffer, 0);
  GstMapInfo map_info;
  gst_memory_map(mem, &map_info, GST_MAP_WRITE);
  gst_buffer_extract(aBuffer, 0, map_info.data, gst_buffer_get_size(aBuffer));
  gst_memory_unmap(mem, &map_info);

  /* create a new gst buffer with the newly created memory and copy the
   * metadata over from the incoming buffer */
  gst_buffer_copy_into(*aOutBuffer, aBuffer,
      (GstBufferCopyFlags)(GST_BUFFER_COPY_METADATA), 0, -1);
  image = GetImageFromBuffer(*aOutBuffer);
}
  1416 #endif
  1418 } // namespace mozilla

mercurial