content/media/gstreamer/GStreamerReader.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/content/media/gstreamer/GStreamerReader.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,1419 @@
     1.4 +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
     1.5 +/* vim:set ts=2 sw=2 sts=2 et cindent: */
     1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.8 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "nsError.h"
    1.11 +#include "nsMimeTypes.h"
    1.12 +#include "MediaDecoderStateMachine.h"
    1.13 +#include "AbstractMediaDecoder.h"
    1.14 +#include "MediaResource.h"
    1.15 +#include "GStreamerReader.h"
    1.16 +#if GST_VERSION_MAJOR >= 1
    1.17 +#include "GStreamerAllocator.h"
    1.18 +#endif
    1.19 +#include "GStreamerFormatHelper.h"
    1.20 +#include "VideoUtils.h"
    1.21 +#include "mozilla/dom/TimeRanges.h"
    1.22 +#include "mozilla/Endian.h"
    1.23 +#include "mozilla/Preferences.h"
    1.24 +#include "mozilla/unused.h"
    1.25 +#include "GStreamerLoader.h"
    1.26 +#include "gfx2DGlue.h"
    1.27 +
    1.28 +namespace mozilla {
    1.29 +
    1.30 +using namespace gfx;
    1.31 +using namespace layers;
    1.32 +
    1.33 +// Un-comment to enable logging of seek bisections.
    1.34 +//#define SEEK_LOGGING
    1.35 +
// Per-instance debug logging. When NSPR logging is compiled out, LOG()
// expands to nothing so call sites need no #ifdef guards.
#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
#define LOG(type, msg, ...) \
  PR_LOG(gMediaDecoderLog, type, ("GStreamerReader(%p) " msg, this, ##__VA_ARGS__))
#else
#define LOG(type, msg, ...)
#endif

// NOTE(review): this uses `#if DEBUG` rather than the more common
// `#ifdef DEBUG`; the constant is only compiled when DEBUG is defined to a
// non-zero value — confirm that matches the build system's convention.
#if DEBUG
static const unsigned int MAX_CHANNELS = 4;
#endif
// Let the demuxer work in pull mode for short files. This used to be a micro
// optimization to have more accurate durations for ogg files in mochitests.
// Since as of today we aren't using gstreamer to demux ogg, and having demuxers
// work in pull mode over http makes them slower (since they really assume
// near-zero latency in pull mode) set the constant to 0 for now, which
// effectively disables it.
static const int SHORT_FILE_SIZE = 0;
// The default resource->Read() size when working in push mode
static const int DEFAULT_SOURCE_READ_SIZE = 50 * 1024;

// Mirror of playbin's GstPlayFlags. These bit flags select which parts of a
// stream playbin will decode; ReadMetadata() masks them to retry with
// audio-only or video-only decoding when one stream is broken.
typedef enum {
  GST_PLAY_FLAG_VIDEO         = (1 << 0),
  GST_PLAY_FLAG_AUDIO         = (1 << 1),
  GST_PLAY_FLAG_TEXT          = (1 << 2),
  GST_PLAY_FLAG_VIS           = (1 << 3),
  GST_PLAY_FLAG_SOFT_VOLUME   = (1 << 4),
  GST_PLAY_FLAG_NATIVE_AUDIO  = (1 << 5),
  GST_PLAY_FLAG_NATIVE_VIDEO  = (1 << 6),
  GST_PLAY_FLAG_DOWNLOAD      = (1 << 7),
  GST_PLAY_FLAG_BUFFERING     = (1 << 8),
  GST_PLAY_FLAG_DEINTERLACE   = (1 << 9),
  GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10)
} PlayFlags;
    1.70 +
// Constructor: initializes all pipeline object pointers to null (the actual
// pipeline is built later in Init()) and wires up the appsrc/appsink callback
// tables. GStreamer objects must not be created here because Init() can fail
// and the destructor only tears down what Init() built.
GStreamerReader::GStreamerReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder),
  mMP3FrameParser(aDecoder->GetResource()->GetLength()),
  mDataOffset(0),
  mUseParserDuration(false),
#if GST_VERSION_MAJOR >= 1
  mAllocator(nullptr),
  mBufferPool(nullptr),
#endif
  mPlayBin(nullptr),
  mBus(nullptr),
  mSource(nullptr),
  mVideoSink(nullptr),
  mVideoAppSink(nullptr),
  mAudioSink(nullptr),
  mAudioAppSink(nullptr),
  mFormat(GST_VIDEO_FORMAT_UNKNOWN),
  mVideoSinkBufferCount(0),
  mAudioSinkBufferCount(0),
  mGstThreadsMonitor("media.gst.threads"),
  mReachedAudioEos(false),
  mReachedVideoEos(false),
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment(true),
#endif
  fpsNum(0),
  fpsDen(0)
{
  MOZ_COUNT_CTOR(GStreamerReader);

  // appsrc callbacks: invoked by GStreamer when it wants more data, has
  // enough, or needs to seek within the resource.
  mSrcCallbacks.need_data = GStreamerReader::NeedDataCb;
  mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb;
  mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb;

  // appsink callbacks: invoked when a decoded buffer/sample is available.
  // The 1.0 API renamed new_buffer to new_sample and dropped buffer lists.
  mSinkCallbacks.eos = GStreamerReader::EosCb;
  mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
#if GST_VERSION_MAJOR >= 1
  mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb;
#else
  mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb;
  mSinkCallbacks.new_buffer_list = nullptr;
#endif

  // Segments start undefined; they are updated from segment events on the
  // sink pads and used to translate buffer timestamps to stream time.
  gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED);
  gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED);
}
   1.117 +
   1.118 +GStreamerReader::~GStreamerReader()
   1.119 +{
   1.120 +  MOZ_COUNT_DTOR(GStreamerReader);
   1.121 +  ResetDecode();
   1.122 +
   1.123 +  if (mPlayBin) {
   1.124 +    gst_app_src_end_of_stream(mSource);
   1.125 +    if (mSource)
   1.126 +      gst_object_unref(mSource);
   1.127 +    gst_element_set_state(mPlayBin, GST_STATE_NULL);
   1.128 +    gst_object_unref(mPlayBin);
   1.129 +    mPlayBin = nullptr;
   1.130 +    mVideoSink = nullptr;
   1.131 +    mVideoAppSink = nullptr;
   1.132 +    mAudioSink = nullptr;
   1.133 +    mAudioAppSink = nullptr;
   1.134 +    gst_object_unref(mBus);
   1.135 +    mBus = nullptr;
   1.136 +#if GST_VERSION_MAJOR >= 1
   1.137 +    g_object_unref(mAllocator);
   1.138 +    g_object_unref(mBufferPool);
   1.139 +#endif
   1.140 +  }
   1.141 +}
   1.142 +
// Builds the playback pipeline: a playbin whose source is an appsrc (fed from
// our MediaResource) and whose audio/video sinks are capsfilter+appsink bins
// that hand decoded buffers back to us. Returns NS_ERROR_FAILURE if the
// playbin element cannot be created.
// aCloneDonor is unused here (part of the MediaDecoderReader interface).
nsresult GStreamerReader::Init(MediaDecoderReader* aCloneDonor)
{
  // Ensure the format helper singleton (and with it, GStreamer itself) is
  // initialized before we create any elements.
  GStreamerFormatHelper::Instance();

#if GST_VERSION_MAJOR >= 1
  // Custom allocator/pool so decoded frames can land directly in gfx-backed
  // memory, avoiding a copy on the video path.
  mAllocator = static_cast<GstAllocator*>(g_object_new(GST_TYPE_MOZ_GFX_MEMORY_ALLOCATOR, nullptr));
  moz_gfx_memory_allocator_set_reader(mAllocator, this);

  mBufferPool = static_cast<GstBufferPool*>(g_object_new(GST_TYPE_MOZ_GFX_BUFFER_POOL, nullptr));
#endif

  // The element was renamed between the 0.10 ("playbin2") and 1.0 ("playbin")
  // APIs.
#if GST_VERSION_MAJOR >= 1
  mPlayBin = gst_element_factory_make("playbin", nullptr);
#else
  mPlayBin = gst_element_factory_make("playbin2", nullptr);
#endif
  if (!mPlayBin) {
    LOG(PR_LOG_ERROR, "couldn't create playbin");
    return NS_ERROR_FAILURE;
  }
  // Disable playbin's internal buffering; Gecko's media cache handles that.
  g_object_set(mPlayBin, "buffer-size", 0, nullptr);
  mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin));

  // Video sink: capsfilter forcing I420 so decoded frames map directly onto
  // PlanarYCbCrImage; appsink with sync=false (we pace frames ourselves) and
  // max-buffers=1 to apply backpressure to the decoder.
  mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! "
      "appsink name=videosink sync=false max-buffers=1 "
#if GST_VERSION_MAJOR >= 1
      "caps=video/x-raw,format=I420"
#else
      "caps=video/x-raw-yuv,format=(fourcc)I420"
#endif
      , TRUE, nullptr);
  mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink),
        "videosink"));
  // Audio sink bin mirrors the video one; its caps are computed at runtime.
  mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! "
        "appsink name=audiosink sync=false max-buffers=1", TRUE, nullptr);
  mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink),
                                                   "audiosink"));
  GstCaps* caps = BuildAudioSinkCaps();
  g_object_set(mAudioAppSink, "caps", caps, nullptr);
  gst_caps_unref(caps);

  // Route decoded buffers/EOS from both appsinks back into this reader.
  gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks,
      (gpointer) this, nullptr);
  gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks,
                             (gpointer) this, nullptr);
  InstallPadCallbacks();

  // "appsrc://" makes playbin create an appsrc; we grab it via the
  // notify::source signal below and hook up mSrcCallbacks there.
  g_object_set(mPlayBin, "uri", "appsrc://",
               "video-sink", mVideoSink,
               "audio-sink", mAudioSink,
               nullptr);

  g_signal_connect(G_OBJECT(mPlayBin), "notify::source",
                   G_CALLBACK(GStreamerReader::PlayBinSourceSetupCb), this);
  g_signal_connect(G_OBJECT(mPlayBin), "element-added",
                   G_CALLBACK(GStreamerReader::PlayElementAddedCb), this);

  return NS_OK;
}
   1.202 +
   1.203 +GstBusSyncReply
   1.204 +GStreamerReader::ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData)
   1.205 +{
   1.206 +  return static_cast<GStreamerReader*>(aUserData)->Error(aBus, aMessage);
   1.207 +}
   1.208 +
   1.209 +GstBusSyncReply
   1.210 +GStreamerReader::Error(GstBus *aBus, GstMessage *aMessage)
   1.211 +{
   1.212 +  if (GST_MESSAGE_TYPE(aMessage) == GST_MESSAGE_ERROR) {
   1.213 +    Eos();
   1.214 +  }
   1.215 +
   1.216 +  return GST_BUS_PASS;
   1.217 +}
   1.218 +
   1.219 +void GStreamerReader::PlayBinSourceSetupCb(GstElement* aPlayBin,
   1.220 +                                           GParamSpec* pspec,
   1.221 +                                           gpointer aUserData)
   1.222 +{
   1.223 +  GstElement *source;
   1.224 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   1.225 +
   1.226 +  g_object_get(aPlayBin, "source", &source, nullptr);
   1.227 +  reader->PlayBinSourceSetup(GST_APP_SRC(source));
   1.228 +}
   1.229 +
// Configures the appsrc created by playbin: installs the data callbacks,
// chooses pull (random access) vs push (seekable) scheduling based on how
// much of the resource is available, and pins the source caps to the
// resource's MIME type. Takes over the reference to aSource obtained by the
// caller (released in the destructor).
void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
{
  mSource = GST_APP_SRC(aSource);
  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, nullptr);
  MediaResource* resource = mDecoder->GetResource();

  /* do a short read to trigger a network request so that GetLength() below
   * returns something meaningful and not -1
   */
  char buf[512];
  unsigned int size = 0;
  // Read result (and bytes-read count) intentionally ignored; this is only a
  // probe to get the channel opened.
  resource->Read(buf, sizeof(buf), &size);
  resource->Seek(SEEK_SET, 0);

  /* now we should have a length */
  int64_t resourceLength = GetDataLength();
  gst_app_src_set_size(mSource, resourceLength);
  if (resource->IsDataCachedToEndOfResource(0) ||
      (resourceLength != -1 && resourceLength <= SHORT_FILE_SIZE)) {
    /* let the demuxer work in pull mode for local files (or very short files)
     * so that we get optimal seeking accuracy/performance
     */
    LOG(PR_LOG_DEBUG, "configuring random access, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
  } else {
    /* make the demuxer work in push mode so that seeking is kept to a minimum
     */
    LOG(PR_LOG_DEBUG, "configuring push mode, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
  }

  // Set the source MIME type to stop typefind trying every. single. format.
  GstCaps *caps =
    GStreamerFormatHelper::ConvertFormatsToCaps(mDecoder->GetResource()->GetContentType().get(),
                                                nullptr);

  gst_app_src_set_caps(aSource, caps);
  gst_caps_unref(caps);
}
   1.269 +
/**
 * If this stream is an MP3, we want to parse the headers to estimate the
 * stream duration.
 *
 * Reads the resource from the start in 4 KiB chunks and feeds each chunk to
 * mMP3FrameParser until it has seen enough frame headers. On success (the
 * parser confirms MP3), records the parser's duration estimate and the
 * offset of the first audio frame (skipping ID3 tags), and updates the
 * appsrc's reported size accordingly.
 *
 * @return NS_OK on success; a failure code if a read fails or the resource
 *         ends before the headers are parsed (bytesRead == 0 breaks the
 *         loop via NS_ENSURE_TRUE, preventing an infinite loop at EOF).
 */
nsresult GStreamerReader::ParseMP3Headers()
{
  MediaResource *resource = mDecoder->GetResource();

  const uint32_t MAX_READ_BYTES = 4096;

  uint64_t offset = 0;
  char bytes[MAX_READ_BYTES];
  uint32_t bytesRead;
  do {
    nsresult rv = resource->ReadAt(offset, bytes, MAX_READ_BYTES, &bytesRead);
    NS_ENSURE_SUCCESS(rv, rv);
    NS_ENSURE_TRUE(bytesRead, NS_ERROR_FAILURE);

    mMP3FrameParser.Parse(bytes, bytesRead, offset);
    offset += bytesRead;
  } while (!mMP3FrameParser.ParsedHeaders());

  if (mMP3FrameParser.IsMP3()) {
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDataOffset = mMP3FrameParser.GetMP3Offset();

    // Update GStreamer's stream length in case we found any ID3 headers to
    // ignore.
    gst_app_src_set_size(mSource, GetDataLength());
  }

  return NS_OK;
}
   1.303 +
   1.304 +int64_t
   1.305 +GStreamerReader::GetDataLength()
   1.306 +{
   1.307 +  int64_t streamLen = mDecoder->GetResource()->GetLength();
   1.308 +
   1.309 +  if (streamLen < 0) {
   1.310 +    return streamLen;
   1.311 +  }
   1.312 +
   1.313 +  return streamLen - mDataOffset;
   1.314 +}
   1.315 +
// Prerolls the pipeline to discover stream metadata (duration, presence of
// audio/video tracks), retrying with one stream disabled when the full
// pipeline fails. On success, fills *aInfo, reports the duration to the
// decoder, installs the bus error handler, and starts the pipeline PLAYING.
// *aTags is always set to nullptr (tags are not extracted here).
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    // Mask playbin's default flags: attempt 0 keeps both streams, attempt 1
    // clears AUDIO, attempt 2 clears VIDEO.
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    // Pick the capsfilter of the stream being disabled on this attempt.
    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
    // NOTE: the two #if branches below each open the `if (` whose body and
    // closing brace are shared after the #endif — the braces balance across
    // the preprocessor conditional.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    } else {
      // No usable duration: treat the media as unseekable.
      mDecoder->SetMediaSeekable(false);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}
   1.470 +
   1.471 +nsresult GStreamerReader::CheckSupportedFormats()
   1.472 +{
   1.473 +  bool done = false;
   1.474 +  bool unsupported = false;
   1.475 +
   1.476 +  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
   1.477 +  while (!done) {
   1.478 +    GstIteratorResult res;
   1.479 +    GstElement* element;
   1.480 +
   1.481 +#if GST_VERSION_MAJOR >= 1
   1.482 +    GValue value = {0,};
   1.483 +    res = gst_iterator_next(it, &value);
   1.484 +#else
   1.485 +    res = gst_iterator_next(it, (void **) &element);
   1.486 +#endif
   1.487 +    switch(res) {
   1.488 +      case GST_ITERATOR_OK:
   1.489 +      {
   1.490 +#if GST_VERSION_MAJOR >= 1
   1.491 +        element = GST_ELEMENT (g_value_get_object (&value));
   1.492 +#endif
   1.493 +        GstElementFactory* factory = gst_element_get_factory(element);
   1.494 +        if (factory) {
   1.495 +          const char* klass = gst_element_factory_get_klass(factory);
   1.496 +          GstPad* pad = gst_element_get_static_pad(element, "sink");
   1.497 +          if (pad) {
   1.498 +            GstCaps* caps;
   1.499 +
   1.500 +#if GST_VERSION_MAJOR >= 1
   1.501 +            caps = gst_pad_get_current_caps(pad);
   1.502 +#else
   1.503 +            caps = gst_pad_get_negotiated_caps(pad);
   1.504 +#endif
   1.505 +
   1.506 +            if (caps) {
   1.507 +              /* check for demuxers but ignore elements like id3demux */
   1.508 +              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
   1.509 +                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
   1.510 +              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
   1.511 +                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);
   1.512 +
   1.513 +              gst_caps_unref(caps);
   1.514 +            }
   1.515 +            gst_object_unref(pad);
   1.516 +          }
   1.517 +        }
   1.518 +
   1.519 +#if GST_VERSION_MAJOR >= 1
   1.520 +        g_value_unset (&value);
   1.521 +#else
   1.522 +        gst_object_unref(element);
   1.523 +#endif
   1.524 +        done = unsupported;
   1.525 +        break;
   1.526 +      }
   1.527 +      case GST_ITERATOR_RESYNC:
   1.528 +        unsupported = false;
   1.529 +        done = false;
   1.530 +        break;
   1.531 +      case GST_ITERATOR_ERROR:
   1.532 +        done = true;
   1.533 +        break;
   1.534 +      case GST_ITERATOR_DONE:
   1.535 +        done = true;
   1.536 +        break;
   1.537 +    }
   1.538 +  }
   1.539 +
   1.540 +  return unsupported ? NS_ERROR_FAILURE : NS_OK;
   1.541 +}
   1.542 +
   1.543 +nsresult GStreamerReader::ResetDecode()
   1.544 +{
   1.545 +  nsresult res = NS_OK;
   1.546 +
   1.547 +  LOG(PR_LOG_DEBUG, "reset decode");
   1.548 +
   1.549 +  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
   1.550 +    res = NS_ERROR_FAILURE;
   1.551 +  }
   1.552 +
   1.553 +  mVideoQueue.Reset();
   1.554 +  mAudioQueue.Reset();
   1.555 +
   1.556 +  mVideoSinkBufferCount = 0;
   1.557 +  mAudioSinkBufferCount = 0;
   1.558 +  mReachedAudioEos = false;
   1.559 +  mReachedVideoEos = false;
   1.560 +#if GST_VERSION_MAJOR >= 1
   1.561 +  mConfigureAlignment = true;
   1.562 +#endif
   1.563 +
   1.564 +  LOG(PR_LOG_DEBUG, "reset decode done");
   1.565 +
   1.566 +  return res;
   1.567 +}
   1.568 +
// Pulls one decoded audio buffer from the audio appsink, converts its
// timestamp to stream time (microseconds) and pushes the samples into
// mAudioCompactor for the state machine. Returns false only at end of
// stream; true means "call me again" (even when no audio was consumed,
// e.g. because only video data arrived).
bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    // The buffer counts and segments are shared with the GStreamer streaming
    // threads (appsink callbacks), so all access is under this monitor.
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    // In 1.0 the appsink hands out GstSamples; keep a ref on the contained
    // buffer and drop the sample wrapper.
#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  // Convert the buffer's running-time timestamp to stream time, then to
  // microseconds (Gecko's media time unit).
  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);

  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
  // 1.0 requires an explicit map to get at the sample memory; 0.10 exposes
  // the data pointer directly.
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  // NOTE(review): assumes mInfo.mAudio.mChannels is non-zero by the time
  // audio buffers arrive — presumably guaranteed by metadata/caps setup;
  // confirm, since a zero value would divide by zero here.
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
   1.651 +
   1.652 +bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
   1.653 +                                       int64_t aTimeThreshold)
   1.654 +{
   1.655 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.656 +
   1.657 +  GstBuffer *buffer = nullptr;
   1.658 +
   1.659 +  {
   1.660 +    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   1.661 +
   1.662 +    if (mReachedVideoEos && !mVideoSinkBufferCount) {
   1.663 +      return false;
   1.664 +    }
   1.665 +
   1.666 +    /* Wait something to be decoded before return or continue */
   1.667 +    if (!mVideoSinkBufferCount) {
   1.668 +      if (!mAudioSinkBufferCount) {
   1.669 +        /* We have nothing decoded so it makes no sense to return to the state machine
   1.670 +         * as it will call us back immediately, we'll return again and so on, wasting
   1.671 +         * CPU cycles for no job done. So, block here until there is either video or
   1.672 +         * audio data available
   1.673 +        */
   1.674 +        mon.Wait();
   1.675 +        if (!mVideoSinkBufferCount) {
   1.676 +          /* There is still no video data available, so either there is audio data or
   1.677 +           * something else has happened (Eos, etc...). Return to the state machine
   1.678 +           * to process it
   1.679 +           */
   1.680 +          return true;
   1.681 +        }
   1.682 +      }
   1.683 +      else {
   1.684 +        return true;
   1.685 +      }
   1.686 +    }
   1.687 +
   1.688 +    mDecoder->NotifyDecodedFrames(0, 1);
   1.689 +
   1.690 +#if GST_VERSION_MAJOR >= 1
   1.691 +    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
   1.692 +    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
   1.693 +    gst_sample_unref(sample);
   1.694 +#else
   1.695 +    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
   1.696 +#endif
   1.697 +    mVideoSinkBufferCount--;
   1.698 +  }
   1.699 +
   1.700 +  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
   1.701 +  if ((aKeyFrameSkip && !isKeyframe)) {
   1.702 +    gst_buffer_unref(buffer);
   1.703 +    return true;
   1.704 +  }
   1.705 +
   1.706 +  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
   1.707 +  {
   1.708 +    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   1.709 +    timestamp = gst_segment_to_stream_time(&mVideoSegment,
   1.710 +                                           GST_FORMAT_TIME, timestamp);
   1.711 +  }
   1.712 +  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
   1.713 +               "frame has invalid timestamp");
   1.714 +
   1.715 +  timestamp = GST_TIME_AS_USECONDS(timestamp);
   1.716 +  int64_t duration = 0;
   1.717 +  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
   1.718 +    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
   1.719 +  else if (fpsNum && fpsDen)
   1.720 +    /* add 1-frame duration */
   1.721 +    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);
   1.722 +
   1.723 +  if (timestamp < aTimeThreshold) {
   1.724 +    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
   1.725 +                      " threshold %" GST_TIME_FORMAT,
   1.726 +                      GST_TIME_ARGS(timestamp * 1000),
   1.727 +                      GST_TIME_ARGS(aTimeThreshold * 1000));
   1.728 +    gst_buffer_unref(buffer);
   1.729 +    return true;
   1.730 +  }
   1.731 +
   1.732 +  if (!buffer)
   1.733 +    /* no more frames */
   1.734 +    return true;
   1.735 +
   1.736 +#if GST_VERSION_MAJOR >= 1
   1.737 +  if (mConfigureAlignment && buffer->pool) {
   1.738 +    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
   1.739 +    GstVideoAlignment align;
   1.740 +    if (gst_buffer_pool_config_get_video_alignment(config, &align))
   1.741 +      gst_video_info_align(&mVideoInfo, &align);
   1.742 +    gst_structure_free(config);
   1.743 +    mConfigureAlignment = false;
   1.744 +  }
   1.745 +#endif
   1.746 +
   1.747 +  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
   1.748 +  if (!image) {
   1.749 +    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
   1.750 +     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
   1.751 +     */
   1.752 +    GstBuffer* tmp = nullptr;
   1.753 +    CopyIntoImageBuffer(buffer, &tmp, image);
   1.754 +    gst_buffer_unref(buffer);
   1.755 +    buffer = tmp;
   1.756 +  }
   1.757 +
   1.758 +  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
   1.759 +  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
   1.760 +                                                mDecoder->GetImageContainer(),
   1.761 +                                                offset, timestamp, duration,
   1.762 +                                                static_cast<Image*>(image.get()),
   1.763 +                                                isKeyframe, -1, mPicture);
   1.764 +  mVideoQueue.Push(video);
   1.765 +
   1.766 +  gst_buffer_unref(buffer);
   1.767 +
   1.768 +  return true;
   1.769 +}
   1.770 +
   1.771 +nsresult GStreamerReader::Seek(int64_t aTarget,
   1.772 +                                 int64_t aStartTime,
   1.773 +                                 int64_t aEndTime,
   1.774 +                                 int64_t aCurrentTime)
   1.775 +{
   1.776 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.777 +
   1.778 +  gint64 seekPos = aTarget * GST_USECOND;
   1.779 +  LOG(PR_LOG_DEBUG, "%p About to seek to %" GST_TIME_FORMAT,
   1.780 +        mDecoder, GST_TIME_ARGS(seekPos));
   1.781 +
   1.782 +  int flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT;
   1.783 +  if (!gst_element_seek_simple(mPlayBin,
   1.784 +                               GST_FORMAT_TIME,
   1.785 +                               static_cast<GstSeekFlags>(flags),
   1.786 +                               seekPos)) {
   1.787 +    LOG(PR_LOG_ERROR, "seek failed");
   1.788 +    return NS_ERROR_FAILURE;
   1.789 +  }
   1.790 +  LOG(PR_LOG_DEBUG, "seek succeeded");
   1.791 +  GstMessage* message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
   1.792 +               (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR));
   1.793 +  gst_message_unref(message);
   1.794 +  LOG(PR_LOG_DEBUG, "seek completed");
   1.795 +
   1.796 +  return NS_OK;
   1.797 +}
   1.798 +
/* Report the time ranges (seconds) currently buffered in the media cache.
 *
 * @param aBuffered   out: ranges are appended via aBuffered->Add(start, end).
 * @param aStartTime  unused in this implementation.
 * @return NS_OK; an empty aBuffered simply means nothing could be reported.
 */
nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered,
                                      int64_t aStartTime)
{
  /* Without valid media metadata there is nothing meaningful to report. */
  if (!mInfo.HasValidMedia()) {
    return NS_OK;
  }

#if GST_VERSION_MAJOR == 0
  /* 0.10 query_convert takes the destination format by pointer (in/out). */
  GstFormat format = GST_FORMAT_TIME;
#endif
  MediaResource* resource = mDecoder->GetResource();
  nsTArray<MediaByteRange> ranges;
  resource->GetCachedRanges(ranges);

  if (resource->IsDataCachedToEndOfResource(0)) {
    /* fast path for local or completely cached files */
    gint64 duration = 0;

    {
      /* GetMediaDuration() must be read under the decoder monitor. */
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      duration = mDecoder->GetMediaDuration();
    }

    /* duration is in microseconds; GST_MSECOND == 1e6, so this yields
     * seconds, which is what TimeRanges expects. */
    double end = (double) duration / GST_MSECOND;
    LOG(PR_LOG_DEBUG, "complete range [0, %f] for [0, %li]",
          end, GetDataLength());
    aBuffered->Add(0, end);
    return NS_OK;
  }

  /* Partial cache: translate each cached byte range into a time range by
   * asking the pipeline to convert byte offsets to stream time. Ranges the
   * pipeline cannot convert are skipped. */
  for(uint32_t index = 0; index < ranges.Length(); index++) {
    int64_t startOffset = ranges[index].mStart;
    int64_t endOffset = ranges[index].mEnd;
    gint64 startTime, endTime;

#if GST_VERSION_MAJOR >= 1
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      startOffset, GST_FORMAT_TIME, &startTime))
      continue;
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      endOffset, GST_FORMAT_TIME, &endTime))
      continue;
#else
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      startOffset, &format, &startTime) || format != GST_FORMAT_TIME)
      continue;
    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
      endOffset, &format, &endTime) || format != GST_FORMAT_TIME)
      continue;
#endif

    /* nanoseconds -> microseconds -> seconds (GST_MSECOND == 1e6). */
    double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
    double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
    LOG(PR_LOG_DEBUG, "adding range [%f, %f] for [%li %li] size %li",
          start, end, startOffset, endOffset, GetDataLength());
    aBuffered->Add(start, end);
  }

  return NS_OK;
}
   1.859 +
/* Read up to aLength bytes from the media resource and push them into the
 * appsrc element (mSource). On read failure or short read the stream is
 * terminated with end-of-stream. Called from the appsrc need-data path.
 *
 * @param aLength number of bytes the appsrc asked for.
 */
void GStreamerReader::ReadAndPushData(guint aLength)
{
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  /* offset1/offset2 are only used for logging and the consistency assertion
   * below; `unused <<` silences unused-variable warnings in release builds. */
  int64_t offset1 = resource->Tell();
  unused << offset1;
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  /* 1.x buffers must be explicitly mapped before their memory is written. */
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  /* Read() may return fewer bytes than requested; loop until we have
   * aLength bytes, hit EOF (size == 0), or a read error. */
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  int64_t offset2 = resource->Tell();
  unused << offset2;

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  /* Shrink the buffer to what was actually read. */
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  /* push_buffer takes ownership of the ref we pass; keep our own ref so the
   * unconditional unref at the end of this function is balanced. */
  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(PR_LOG_ERROR, "ReadAndPushData push ret %s(%d)", gst_flow_get_name(ret), ret);
  }

  if (NS_FAILED(rv)) {
    /* Terminate the stream if there is an error in reading */
    LOG(PR_LOG_ERROR, "ReadAndPushData read error, rv=%x", rv);
    gst_app_src_end_of_stream(mSource);
  } else if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    LOG(PR_LOG_WARNING, "ReadAndPushData read underflow, "
        "bytesRead=%u, aLength=%u, offset(%lld,%lld)",
        bytesRead, aLength, offset1, offset2);
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);

  /* Ensure offset change is consistent in this function.
   * If there are other stream operations on another thread at the same time,
   * it will disturb the GStreamer state machine.
   */
  MOZ_ASSERT(offset1 + bytesRead == offset2);
}
   1.921 +
   1.922 +void GStreamerReader::NeedDataCb(GstAppSrc* aSrc,
   1.923 +                                 guint aLength,
   1.924 +                                 gpointer aUserData)
   1.925 +{
   1.926 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   1.927 +  reader->NeedData(aSrc, aLength);
   1.928 +}
   1.929 +
   1.930 +void GStreamerReader::NeedData(GstAppSrc* aSrc, guint aLength)
   1.931 +{
   1.932 +  if (aLength == static_cast<guint>(-1))
   1.933 +    aLength = DEFAULT_SOURCE_READ_SIZE;
   1.934 +  ReadAndPushData(aLength);
   1.935 +}
   1.936 +
   1.937 +void GStreamerReader::EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData)
   1.938 +{
   1.939 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   1.940 +  reader->EnoughData(aSrc);
   1.941 +}
   1.942 +
void GStreamerReader::EnoughData(GstAppSrc* aSrc)
{
  /* Intentionally empty: data is only pushed in response to need-data
   * (see NeedData/ReadAndPushData), so there is nothing to throttle here. */
}
   1.946 +
   1.947 +gboolean GStreamerReader::SeekDataCb(GstAppSrc* aSrc,
   1.948 +                                     guint64 aOffset,
   1.949 +                                     gpointer aUserData)
   1.950 +{
   1.951 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   1.952 +  return reader->SeekData(aSrc, aOffset);
   1.953 +}
   1.954 +
   1.955 +gboolean GStreamerReader::SeekData(GstAppSrc* aSrc, guint64 aOffset)
   1.956 +{
   1.957 +  aOffset += mDataOffset;
   1.958 +
   1.959 +  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   1.960 +  MediaResource* resource = mDecoder->GetResource();
   1.961 +  int64_t resourceLength = resource->GetLength();
   1.962 +
   1.963 +  if (gst_app_src_get_size(mSource) == -1) {
   1.964 +    /* It's possible that we didn't know the length when we initialized mSource
   1.965 +     * but maybe we do now
   1.966 +     */
   1.967 +    gst_app_src_set_size(mSource, GetDataLength());
   1.968 +  }
   1.969 +
   1.970 +  nsresult rv = NS_ERROR_FAILURE;
   1.971 +  if (aOffset < static_cast<guint64>(resourceLength)) {
   1.972 +    rv = resource->Seek(SEEK_SET, aOffset);
   1.973 +  }
   1.974 +
   1.975 +  if (NS_FAILED(rv)) {
   1.976 +    LOG(PR_LOG_ERROR, "seek at %lu failed", aOffset);
   1.977 +  } else {
   1.978 +    MOZ_ASSERT(aOffset == static_cast<guint64>(resource->Tell()));
   1.979 +  }
   1.980 +
   1.981 +  return NS_SUCCEEDED(rv);
   1.982 +}
   1.983 +
   1.984 +GstFlowReturn GStreamerReader::NewPrerollCb(GstAppSink* aSink,
   1.985 +                                              gpointer aUserData)
   1.986 +{
   1.987 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   1.988 +
   1.989 +  if (aSink == reader->mVideoAppSink)
   1.990 +    reader->VideoPreroll();
   1.991 +  else
   1.992 +    reader->AudioPreroll();
   1.993 +  return GST_FLOW_OK;
   1.994 +}
   1.995 +
   1.996 +void GStreamerReader::AudioPreroll()
   1.997 +{
   1.998 +  /* The first audio buffer has reached the audio sink. Get rate and channels */
   1.999 +  LOG(PR_LOG_DEBUG, "Audio preroll");
  1.1000 +  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  1.1001 +#if GST_VERSION_MAJOR >= 1
  1.1002 +  GstCaps *caps = gst_pad_get_current_caps(sinkpad);
  1.1003 +#else
  1.1004 +  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  1.1005 +#endif
  1.1006 +  GstStructure* s = gst_caps_get_structure(caps, 0);
  1.1007 +  mInfo.mAudio.mRate = mInfo.mAudio.mChannels = 0;
  1.1008 +  gst_structure_get_int(s, "rate", (gint*) &mInfo.mAudio.mRate);
  1.1009 +  gst_structure_get_int(s, "channels", (gint*) &mInfo.mAudio.mChannels);
  1.1010 +  NS_ASSERTION(mInfo.mAudio.mRate != 0, ("audio rate is zero"));
  1.1011 +  NS_ASSERTION(mInfo.mAudio.mChannels != 0, ("audio channels is zero"));
  1.1012 +  NS_ASSERTION(mInfo.mAudio.mChannels > 0 && mInfo.mAudio.mChannels <= MAX_CHANNELS,
  1.1013 +      "invalid audio channels number");
  1.1014 +  mInfo.mAudio.mHasAudio = true;
  1.1015 +  gst_caps_unref(caps);
  1.1016 +  gst_object_unref(sinkpad);
  1.1017 +}
  1.1018 +
/* Called once the first video buffer reaches the video sink: read the
 * negotiated caps, record format/size/PAR, compute the display size, and
 * mark video as present in mInfo. Stops playback (Eos) if the frame
 * dimensions are invalid. */
void GStreamerReader::VideoPreroll()
{
  /* The first video buffer has reached the video sink. Get width and height */
  LOG(PR_LOG_DEBUG, "Video preroll");
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  int PARNumerator, PARDenominator;
#if GST_VERSION_MAJOR >= 1
  GstCaps* caps = gst_pad_get_current_caps(sinkpad);
  /* Populate mVideoInfo (format, dimensions, strides) from the caps. */
  memset (&mVideoInfo, 0, sizeof (mVideoInfo));
  gst_video_info_from_caps(&mVideoInfo, caps);
  mFormat = mVideoInfo.finfo->format;
  mPicture.width = mVideoInfo.width;
  mPicture.height = mVideoInfo.height;
  PARNumerator = GST_VIDEO_INFO_PAR_N(&mVideoInfo);
  PARDenominator = GST_VIDEO_INFO_PAR_D(&mVideoInfo);
#else
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
  /* 0.10 caps may omit the pixel-aspect-ratio; default to square pixels. */
  if (!gst_video_parse_caps_pixel_aspect_ratio(caps, &PARNumerator, &PARDenominator)) {
    PARNumerator = 1;
    PARDenominator = 1;
  }
#endif
  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");

  // Calculate display size according to pixel aspect ratio.
  nsIntRect pictureRect(0, 0, mPicture.width, mPicture.height);
  nsIntSize frameSize = nsIntSize(mPicture.width, mPicture.height);
  nsIntSize displaySize = nsIntSize(mPicture.width, mPicture.height);
  ScaleDisplayByAspectRatio(displaySize, float(PARNumerator) / float(PARDenominator));

  // If video frame size is overflow, stop playing.
  if (IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    /* fpsNum/fpsDen are members; used elsewhere for frame duration. */
    gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
    mInfo.mVideo.mDisplay = ThebesIntSize(displaySize.ToIntSize());
    mInfo.mVideo.mHasVideo = true;
  } else {
    LOG(PR_LOG_DEBUG, "invalid video region");
    Eos();
  }
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
  1.1063 +
  1.1064 +GstFlowReturn GStreamerReader::NewBufferCb(GstAppSink* aSink,
  1.1065 +                                           gpointer aUserData)
  1.1066 +{
  1.1067 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  1.1068 +
  1.1069 +  if (aSink == reader->mVideoAppSink)
  1.1070 +    reader->NewVideoBuffer();
  1.1071 +  else
  1.1072 +    reader->NewAudioBuffer();
  1.1073 +
  1.1074 +  return GST_FLOW_OK;
  1.1075 +}
  1.1076 +
  1.1077 +void GStreamerReader::NewVideoBuffer()
  1.1078 +{
  1.1079 +  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  1.1080 +  /* We have a new video buffer queued in the video sink. Increment the counter
  1.1081 +   * and notify the decode thread potentially blocked in DecodeVideoFrame
  1.1082 +   */
  1.1083 +
  1.1084 +  mDecoder->NotifyDecodedFrames(1, 0);
  1.1085 +  mVideoSinkBufferCount++;
  1.1086 +  mon.NotifyAll();
  1.1087 +}
  1.1088 +
  1.1089 +void GStreamerReader::NewAudioBuffer()
  1.1090 +{
  1.1091 +  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  1.1092 +  /* We have a new audio buffer queued in the audio sink. Increment the counter
  1.1093 +   * and notify the decode thread potentially blocked in DecodeAudioData
  1.1094 +   */
  1.1095 +  mAudioSinkBufferCount++;
  1.1096 +  mon.NotifyAll();
  1.1097 +}
  1.1098 +
  1.1099 +void GStreamerReader::EosCb(GstAppSink* aSink, gpointer aUserData)
  1.1100 +{
  1.1101 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
  1.1102 +  reader->Eos(aSink);
  1.1103 +}
  1.1104 +
  1.1105 +void GStreamerReader::Eos(GstAppSink* aSink)
  1.1106 +{
  1.1107 +  /* We reached the end of the stream */
  1.1108 +  {
  1.1109 +    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  1.1110 +    /* Potentially unblock DecodeVideoFrame and DecodeAudioData */
  1.1111 +    if (aSink == mVideoAppSink) {
  1.1112 +      mReachedVideoEos = true;
  1.1113 +    } else if (aSink == mAudioAppSink) {
  1.1114 +      mReachedAudioEos = true;
  1.1115 +    } else {
  1.1116 +      // Assume this is an error causing an EOS.
  1.1117 +      mReachedAudioEos = true;
  1.1118 +      mReachedVideoEos = true;
  1.1119 +    }
  1.1120 +    mon.NotifyAll();
  1.1121 +  }
  1.1122 +
  1.1123 +  {
  1.1124 +    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  1.1125 +    /* Potentially unblock the decode thread in ::DecodeLoop */
  1.1126 +    mon.NotifyAll();
  1.1127 +  }
  1.1128 +}
  1.1129 +
  1.1130 +/**
  1.1131 + * This callback is called while the pipeline is automatically built, after a
  1.1132 + * new element has been added to the pipeline. We use it to find the
  1.1133 + * uridecodebin instance used by playbin and connect to it to apply our
  1.1134 + * whitelist.
  1.1135 + */
  1.1136 +void
  1.1137 +GStreamerReader::PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
  1.1138 +                                    gpointer *aUserData)
  1.1139 +{
  1.1140 +  const static char sUriDecodeBinPrefix[] = "uridecodebin";
  1.1141 +  gchar *name = gst_element_get_name(aElement);
  1.1142 +
  1.1143 +  // Attach this callback to uridecodebin, child of playbin.
  1.1144 +  if (!strncmp(name, sUriDecodeBinPrefix, sizeof(sUriDecodeBinPrefix) - 1)) {
  1.1145 +    g_signal_connect(G_OBJECT(aElement), "autoplug-sort",
  1.1146 +                     G_CALLBACK(GStreamerReader::AutoplugSortCb), aUserData);
  1.1147 +  }
  1.1148 +
  1.1149 +  g_free(name);
  1.1150 +}
  1.1151 +
  1.1152 +bool
  1.1153 +GStreamerReader::ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps)
  1.1154 +{
  1.1155 +  bool autoplug;
  1.1156 +  const gchar *klass = gst_element_factory_get_klass(aFactory);
  1.1157 +  if (strstr(klass, "Demuxer") && !strstr(klass, "Metadata")) {
  1.1158 +    autoplug = GStreamerFormatHelper::Instance()->CanHandleContainerCaps(aCaps);
  1.1159 +  } else if (strstr(klass, "Decoder") && !strstr(klass, "Generic")) {
  1.1160 +    autoplug = GStreamerFormatHelper::Instance()->CanHandleCodecCaps(aCaps);
  1.1161 +  } else {
  1.1162 +    /* we only filter demuxers and decoders, let everything else be autoplugged */
  1.1163 +    autoplug = true;
  1.1164 +  }
  1.1165 +
  1.1166 +  return autoplug;
  1.1167 +}
  1.1168 +
  1.1169 +/**
  1.1170 + * This is called by uridecodebin (running inside playbin), after it has found
  1.1171 + * candidate factories to continue decoding the stream. We apply the whitelist
  1.1172 + * here, allowing only demuxers and decoders that output the formats we want to
  1.1173 + * support.
  1.1174 + */
  1.1175 +GValueArray*
  1.1176 +GStreamerReader::AutoplugSortCb(GstElement* aElement, GstPad* aPad,
  1.1177 +                                GstCaps* aCaps, GValueArray* aFactories)
  1.1178 +{
  1.1179 +  if (!aFactories->n_values) {
  1.1180 +    return nullptr;
  1.1181 +  }
  1.1182 +
  1.1183 +  /* aFactories[0] is the element factory that is going to be used to
  1.1184 +   * create the next element needed to demux or decode the stream.
  1.1185 +   */
  1.1186 +  GstElementFactory *factory = (GstElementFactory*) g_value_get_object(g_value_array_get_nth(aFactories, 0));
  1.1187 +  if (!ShouldAutoplugFactory(factory, aCaps)) {
  1.1188 +    /* We don't support this factory. Return an empty array to signal that we
  1.1189 +     * don't want to continue decoding this (sub)stream.
  1.1190 +     */
  1.1191 +    return g_value_array_new(0);
  1.1192 +  }
  1.1193 +
  1.1194 +  /* nullptr means that we're ok with the candidates and don't need to apply any
  1.1195 +   * sorting/filtering.
  1.1196 +   */
  1.1197 +  return nullptr;
  1.1198 +}
  1.1199 +
  1.1200 +/**
  1.1201 + * If this is an MP3 stream, pass any new data we get to the MP3 frame parser
  1.1202 + * for duration estimation.
  1.1203 + */
  1.1204 +void GStreamerReader::NotifyDataArrived(const char *aBuffer,
  1.1205 +                                        uint32_t aLength,
  1.1206 +                                        int64_t aOffset)
  1.1207 +{
  1.1208 +  MOZ_ASSERT(NS_IsMainThread());
  1.1209 +
  1.1210 +  if (HasVideo()) {
  1.1211 +    return;
  1.1212 +  }
  1.1213 +
  1.1214 +  if (!mMP3FrameParser.NeedsData()) {
  1.1215 +    return;
  1.1216 +  }
  1.1217 +
  1.1218 +  mMP3FrameParser.Parse(aBuffer, aLength, aOffset);
  1.1219 +
  1.1220 +  int64_t duration = mMP3FrameParser.GetDuration();
  1.1221 +  if (duration != mLastParserDuration && mUseParserDuration) {
  1.1222 +    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  1.1223 +    mLastParserDuration = duration;
  1.1224 +    mDecoder->UpdateEstimatedMediaDuration(mLastParserDuration);
  1.1225 +  }
  1.1226 +}
  1.1227 +
  1.1228 +#if GST_VERSION_MAJOR >= 1
  1.1229 +GstCaps* GStreamerReader::BuildAudioSinkCaps()
  1.1230 +{
  1.1231 +  GstCaps* caps = gst_caps_from_string("audio/x-raw, channels={1,2}");
  1.1232 +  const char* format;
  1.1233 +#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  1.1234 +#if MOZ_LITTLE_ENDIAN
  1.1235 +  format = "F32LE";
  1.1236 +#else
  1.1237 +  format = "F32BE";
  1.1238 +#endif
  1.1239 +#else /* !MOZ_SAMPLE_TYPE_FLOAT32 */
  1.1240 +#if MOZ_LITTLE_ENDIAN
  1.1241 +  format = "S16LE";
  1.1242 +#else
  1.1243 +  format = "S16BE";
  1.1244 +#endif
  1.1245 +#endif
  1.1246 +  gst_caps_set_simple(caps, "format", G_TYPE_STRING, format, nullptr);
  1.1247 +
  1.1248 +  return caps;
  1.1249 +}
  1.1250 +
  1.1251 +void GStreamerReader::InstallPadCallbacks()
  1.1252 +{
  1.1253 +  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  1.1254 +
  1.1255 +  gst_pad_add_probe(sinkpad,
  1.1256 +      (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING |
  1.1257 +        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
  1.1258 +        GST_PAD_PROBE_TYPE_EVENT_UPSTREAM |
  1.1259 +        GST_PAD_PROBE_TYPE_EVENT_FLUSH),
  1.1260 +      &GStreamerReader::EventProbeCb, this, nullptr);
  1.1261 +  gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM,
  1.1262 +      GStreamerReader::QueryProbeCb, nullptr, nullptr);
  1.1263 +
  1.1264 +  gst_pad_set_element_private(sinkpad, this);
  1.1265 +  gst_object_unref(sinkpad);
  1.1266 +
  1.1267 +  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  1.1268 +  gst_pad_add_probe(sinkpad,
  1.1269 +      (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING |
  1.1270 +        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
  1.1271 +        GST_PAD_PROBE_TYPE_EVENT_UPSTREAM |
  1.1272 +        GST_PAD_PROBE_TYPE_EVENT_FLUSH),
  1.1273 +      &GStreamerReader::EventProbeCb, this, nullptr);
  1.1274 +  gst_object_unref(sinkpad);
  1.1275 +}
  1.1276 +
  1.1277 +GstPadProbeReturn GStreamerReader::EventProbeCb(GstPad *aPad,
  1.1278 +                                                GstPadProbeInfo *aInfo,
  1.1279 +                                                gpointer aUserData)
  1.1280 +{
  1.1281 +  GStreamerReader *reader = (GStreamerReader *) aUserData;
  1.1282 +  GstEvent *aEvent = (GstEvent *)aInfo->data;
  1.1283 +  return reader->EventProbe(aPad, aEvent);
  1.1284 +}
  1.1285 +
/* Pad probe watching sink-pad events: records segment events (so buffer
 * timestamps can later be converted to stream time) and resets decode state
 * on flush-stop (i.e. after seeks). Always lets the event pass through. */
GstPadProbeReturn GStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent)
{
  /* The pad's parent tells us whether this is the video or audio sink. */
  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));

  LOG(PR_LOG_DEBUG, "event probe %s", GST_EVENT_TYPE_NAME (aEvent));

  switch(GST_EVENT_TYPE(aEvent)) {
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *newSegment;
      GstSegment* segment;

      /* Store the segments so we can convert timestamps to stream time, which
       * is what the upper layers sync on.
       */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
#if GST_VERSION_MINOR <= 1 && GST_VERSION_MICRO < 1
      /* NOTE(review): extra ResetDecode() on segment for early GStreamer
       * 1.0.x releases only — presumably a workaround for a bug in those
       * versions; confirm before touching. */
      ResetDecode();
#endif
      gst_event_parse_segment(aEvent, &newSegment);
      if (parent == GST_ELEMENT(mVideoAppSink))
        segment = &mVideoSegment;
      else
        segment = &mAudioSegment;
      gst_segment_copy_into (newSegment, segment);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Reset on seeks */
      ResetDecode();
      break;
    default:
      break;
  }
  /* gst_pad_get_parent returned a new ref; release it. */
  gst_object_unref(parent);

  return GST_PAD_PROBE_OK;
}
  1.1324 +
  1.1325 +GstPadProbeReturn GStreamerReader::QueryProbeCb(GstPad* aPad, GstPadProbeInfo* aInfo, gpointer aUserData)
  1.1326 +{
  1.1327 +  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad));
  1.1328 +  return reader->QueryProbe(aPad, aInfo, aUserData);
  1.1329 +}
  1.1330 +
  1.1331 +GstPadProbeReturn GStreamerReader::QueryProbe(GstPad* aPad, GstPadProbeInfo* aInfo, gpointer aUserData)
  1.1332 +{
  1.1333 +  GstQuery *query = gst_pad_probe_info_get_query(aInfo);
  1.1334 +  GstPadProbeReturn ret = GST_PAD_PROBE_OK;
  1.1335 +
  1.1336 +  switch (GST_QUERY_TYPE (query)) {
  1.1337 +    case GST_QUERY_ALLOCATION:
  1.1338 +      GstCaps *caps;
  1.1339 +      GstVideoInfo info;
  1.1340 +      gboolean need_pool;
  1.1341 +
  1.1342 +      gst_query_parse_allocation(query, &caps, &need_pool);
  1.1343 +      gst_video_info_init(&info);
  1.1344 +      gst_video_info_from_caps(&info, caps);
  1.1345 +      gst_query_add_allocation_param(query, mAllocator, nullptr);
  1.1346 +      gst_query_add_allocation_pool(query, mBufferPool, info.size, 0, 0);
  1.1347 +      break;
  1.1348 +    default:
  1.1349 +      break;
  1.1350 +  }
  1.1351 +
  1.1352 +  return ret;
  1.1353 +}
  1.1354 +
/* Fill a PlanarYCbCrImage::Data descriptor from a mapped GstVideoFrame:
 * plane pointers, strides, per-plane sizes and pixel skips for the Y, Cb
 * and Cr components. Only 3-component YUV frames are supported.
 *
 * @param aFrame mapped video frame (must stay mapped while aData is used).
 * @param aData  out: descriptor to populate; no pixel data is copied.
 */
void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame,
                                              PlanarYCbCrImage::Data *aData)
{
  NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo),
               "Non-YUV video frame formats not supported");
  NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3,
               "Unsupported number of components in video frame");

  aData->mPicX = aData->mPicY = 0;
  aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  aData->mStereoMode = StereoMode::MONO;

  /* Y plane (component 0). */
  aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0);
  aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0);
  aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0),
                          GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0));
  /* Skips are "extra bytes between pixels": pixel stride minus one. */
  aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1;
  /* Chroma planes (components 1 and 2) share stride and size. */
  aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1);
  aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1),
                             GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1));
  aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1);
  aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2);
  aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1;
  aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1;
}
  1.1380 +
/* If aBuffer is backed by a single block of our gfx allocator's memory,
 * return the PlanarYCbCrImage wrapping it (zero-copy path); otherwise
 * return nullptr so the caller falls back to copying (CopyIntoImageBuffer).
 */
nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
{
  nsRefPtr<PlanarYCbCrImage> image = nullptr;

  /* Only single-memory buffers can map onto one image allocation. */
  if (gst_buffer_n_memory(aBuffer) == 1) {
    GstMemory* mem = gst_buffer_peek_memory(aBuffer, 0);
    if (GST_IS_MOZ_GFX_MEMORY_ALLOCATOR(mem->allocator)) {
      image = moz_gfx_memory_get_image(mem);

      /* NOTE(review): gst_video_frame_map's return value is not checked;
       * on failure the frame fields would be garbage — confirm mapping can
       * not fail for our own allocator's memory. */
      GstVideoFrame frame;
      gst_video_frame_map(&frame, &mVideoInfo, aBuffer, GST_MAP_READ);
      PlanarYCbCrImage::Data data;
      ImageDataFromVideoFrame(&frame, &data);
      /* The image already owns the pixels; only attach the plane layout. */
      image->SetDataNoCopy(data);
      gst_video_frame_unmap(&frame);
    }
  }

  return image;
}
  1.1401 +
/* Fallback for buffers not backed by our gfx allocator: allocate a new
 * buffer from mAllocator, memcpy the pixel data into it, copy the source
 * buffer's metadata (timestamps, flags) over, and hand back both the new
 * buffer and its backing image.
 *
 * @param aBuffer    source buffer (not consumed; caller keeps its ref).
 * @param aOutBuffer out: newly allocated buffer; caller takes ownership.
 * @param image      out: image backing *aOutBuffer.
 */
void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage> &image)
{
  *aOutBuffer = gst_buffer_new_allocate(mAllocator, gst_buffer_get_size(aBuffer), nullptr);
  GstMemory *mem = gst_buffer_peek_memory(*aOutBuffer, 0);
  /* NOTE(review): gst_memory_map's return value is not checked — presumably
   * mapping our own allocator's memory cannot fail; confirm. */
  GstMapInfo map_info;
  gst_memory_map(mem, &map_info, GST_MAP_WRITE);
  gst_buffer_extract(aBuffer, 0, map_info.data, gst_buffer_get_size(aBuffer));
  gst_memory_unmap(mem, &map_info);

  /* create a new gst buffer with the newly created memory and copy the
   * metadata over from the incoming buffer */
  gst_buffer_copy_into(*aOutBuffer, aBuffer,
      (GstBufferCopyFlags)(GST_BUFFER_COPY_METADATA), 0, -1);
  image = GetImageFromBuffer(*aOutBuffer);
}
  1.1419 +#endif
  1.1420 +
  1.1421 +} // namespace mozilla
  1.1422 +

mercurial