Fri, 16 Jan 2015 18:13:44 +0100
Integrate a suggestion from code review to improve consistency with the existing code.
michael@0 | 1 | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
michael@0 | 2 | /* vim:set ts=2 sw=2 sts=2 et cindent: */ |
michael@0 | 3 | /* This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
michael@0 | 5 | * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #include "nsError.h" |
michael@0 | 8 | #include "nsMimeTypes.h" |
michael@0 | 9 | #include "MediaDecoderStateMachine.h" |
michael@0 | 10 | #include "AbstractMediaDecoder.h" |
michael@0 | 11 | #include "MediaResource.h" |
michael@0 | 12 | #include "GStreamerReader.h" |
michael@0 | 13 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 14 | #include "GStreamerAllocator.h" |
michael@0 | 15 | #endif |
michael@0 | 16 | #include "GStreamerFormatHelper.h" |
michael@0 | 17 | #include "VideoUtils.h" |
michael@0 | 18 | #include "mozilla/dom/TimeRanges.h" |
michael@0 | 19 | #include "mozilla/Endian.h" |
michael@0 | 20 | #include "mozilla/Preferences.h" |
michael@0 | 21 | #include "mozilla/unused.h" |
michael@0 | 22 | #include "GStreamerLoader.h" |
michael@0 | 23 | #include "gfx2DGlue.h" |
michael@0 | 24 | |
michael@0 | 25 | namespace mozilla { |
michael@0 | 26 | |
michael@0 | 27 | using namespace gfx; |
michael@0 | 28 | using namespace layers; |
michael@0 | 29 | |
michael@0 | 30 | // Un-comment to enable logging of seek bisections. |
michael@0 | 31 | //#define SEEK_LOGGING |
michael@0 | 32 | |
michael@0 | 33 | #ifdef PR_LOGGING |
michael@0 | 34 | extern PRLogModuleInfo* gMediaDecoderLog; |
michael@0 | 35 | #define LOG(type, msg, ...) \ |
michael@0 | 36 | PR_LOG(gMediaDecoderLog, type, ("GStreamerReader(%p) " msg, this, ##__VA_ARGS__)) |
michael@0 | 37 | #else |
michael@0 | 38 | #define LOG(type, msg, ...) |
michael@0 | 39 | #endif |
michael@0 | 40 | |
#ifdef DEBUG
// Upper bound on the channel count we expect to handle; only referenced by
// debug-build assertions. Use #ifdef (not #if): DEBUG may be defined with no
// value (-DDEBUG), which would make `#if DEBUG` a preprocessor error.
static const unsigned int MAX_CHANNELS = 4;
#endif
// Let the demuxer work in pull mode for short files. This used to be a micro
// optimization to have more accurate durations for ogg files in mochitests.
// Since as of today we aren't using gstreamer to demux ogg, and having demuxers
// work in pull mode over http makes them slower (since they really assume
// near-zero latency in pull mode) set the constant to 0 for now, which
// effectively disables it.
static const int SHORT_FILE_SIZE = 0;
// The default resource->Read() size when working in push mode
static const int DEFAULT_SOURCE_READ_SIZE = 50 * 1024;
michael@0 | 53 | |
// Bit values for playbin's "flags" property, selecting which streams/features
// the pipeline enables. NOTE(review): these appear to mirror GStreamer's
// GstPlayFlags enum — presumably duplicated here because the installed
// headers don't expose it; confirm the values stay in sync with upstream.
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0),           // render/decode video streams
  GST_PLAY_FLAG_AUDIO = (1 << 1),           // render/decode audio streams
  GST_PLAY_FLAG_TEXT = (1 << 2),            // render subtitle/text streams
  GST_PLAY_FLAG_VIS = (1 << 3),             // enable audio visualisation
  GST_PLAY_FLAG_SOFT_VOLUME = (1 << 4),     // software volume element
  GST_PLAY_FLAG_NATIVE_AUDIO = (1 << 5),    // only allow native audio formats
  GST_PLAY_FLAG_NATIVE_VIDEO = (1 << 6),    // only allow native video formats
  GST_PLAY_FLAG_DOWNLOAD = (1 << 7),        // progressive download buffering
  GST_PLAY_FLAG_BUFFERING = (1 << 8),       // buffer the demuxed/parsed data
  GST_PLAY_FLAG_DEINTERLACE = (1 << 9),     // deinterlace video if needed
  GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10) // software color balance
} PlayFlags;
michael@0 | 67 | |
// Construct a reader that decodes through a GStreamer playbin pipeline.
// Only cheap state is set up here (member defaults, callback tables, segment
// init); the pipeline elements themselves are created later in Init().
GStreamerReader::GStreamerReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder),
  mMP3FrameParser(aDecoder->GetResource()->GetLength()),
  mDataOffset(0),
  mUseParserDuration(false),
#if GST_VERSION_MAJOR >= 1
  mAllocator(nullptr),
  mBufferPool(nullptr),
#endif
  mPlayBin(nullptr),
  mBus(nullptr),
  mSource(nullptr),
  mVideoSink(nullptr),
  mVideoAppSink(nullptr),
  mAudioSink(nullptr),
  mAudioAppSink(nullptr),
  mFormat(GST_VIDEO_FORMAT_UNKNOWN),
  mVideoSinkBufferCount(0),
  mAudioSinkBufferCount(0),
  mGstThreadsMonitor("media.gst.threads"),
  mReachedAudioEos(false),
  mReachedVideoEos(false),
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment(true),
#endif
  fpsNum(0),
  fpsDen(0)
{
  MOZ_COUNT_CTOR(GStreamerReader);

  // appsrc callbacks: GStreamer calls these when it wants more input data,
  // has buffered enough, or needs the source to seek.
  mSrcCallbacks.need_data = GStreamerReader::NeedDataCb;
  mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb;
  mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb;

  // appsink callbacks shared by the audio and video sinks. The 0.10 and 1.0
  // APIs name the "new decoded data" callback differently.
  mSinkCallbacks.eos = GStreamerReader::EosCb;
  mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
#if GST_VERSION_MAJOR >= 1
  mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb;
#else
  mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb;
  mSinkCallbacks.new_buffer_list = nullptr;
#endif

  // Segments start undefined; they get updated from segment events once the
  // pipeline runs (used to convert buffer timestamps to stream time).
  gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED);
  gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED);
}
michael@0 | 114 | |
michael@0 | 115 | GStreamerReader::~GStreamerReader() |
michael@0 | 116 | { |
michael@0 | 117 | MOZ_COUNT_DTOR(GStreamerReader); |
michael@0 | 118 | ResetDecode(); |
michael@0 | 119 | |
michael@0 | 120 | if (mPlayBin) { |
michael@0 | 121 | gst_app_src_end_of_stream(mSource); |
michael@0 | 122 | if (mSource) |
michael@0 | 123 | gst_object_unref(mSource); |
michael@0 | 124 | gst_element_set_state(mPlayBin, GST_STATE_NULL); |
michael@0 | 125 | gst_object_unref(mPlayBin); |
michael@0 | 126 | mPlayBin = nullptr; |
michael@0 | 127 | mVideoSink = nullptr; |
michael@0 | 128 | mVideoAppSink = nullptr; |
michael@0 | 129 | mAudioSink = nullptr; |
michael@0 | 130 | mAudioAppSink = nullptr; |
michael@0 | 131 | gst_object_unref(mBus); |
michael@0 | 132 | mBus = nullptr; |
michael@0 | 133 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 134 | g_object_unref(mAllocator); |
michael@0 | 135 | g_object_unref(mBufferPool); |
michael@0 | 136 | #endif |
michael@0 | 137 | } |
michael@0 | 138 | } |
michael@0 | 139 | |
// Build the decoding pipeline: a playbin whose input is an appsrc (via the
// "appsrc://" URI) and whose audio/video outputs are appsink bins we pull
// decoded data from. Returns NS_ERROR_FAILURE if playbin can't be created.
// aCloneDonor is unused by this reader.
nsresult GStreamerReader::Init(MediaDecoderReader* aCloneDonor)
{
  // Ensure the format-helper singleton (and its GStreamer setup) exists.
  GStreamerFormatHelper::Instance();

#if GST_VERSION_MAJOR >= 1
  // Custom allocator/buffer pool so decoded frames land in gfx-backed memory.
  mAllocator = static_cast<GstAllocator*>(g_object_new(GST_TYPE_MOZ_GFX_MEMORY_ALLOCATOR, nullptr));
  moz_gfx_memory_allocator_set_reader(mAllocator, this);

  mBufferPool = static_cast<GstBufferPool*>(g_object_new(GST_TYPE_MOZ_GFX_BUFFER_POOL, nullptr));
#endif

  // The element was renamed between the 0.10 ("playbin2") and 1.0 ("playbin")
  // APIs.
#if GST_VERSION_MAJOR >= 1
  mPlayBin = gst_element_factory_make("playbin", nullptr);
#else
  mPlayBin = gst_element_factory_make("playbin2", nullptr);
#endif
  if (!mPlayBin) {
    LOG(PR_LOG_ERROR, "couldn't create playbin");
    return NS_ERROR_FAILURE;
  }
  // Disable playbin's internal buffering; Gecko's media cache handles that.
  g_object_set(mPlayBin, "buffer-size", 0, nullptr);
  mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin));

  // Video sink: capsfilter (reconfigured in ReadMetadata) feeding an appsink
  // restricted to I420 so frames can be wrapped as planar YCbCr images.
  mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! "
      "appsink name=videosink sync=false max-buffers=1 "
#if GST_VERSION_MAJOR >= 1
      "caps=video/x-raw,format=I420"
#else
      "caps=video/x-raw-yuv,format=(fourcc)I420"
#endif
      , TRUE, nullptr);
  mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink),
      "videosink"));
  // Audio sink: same shape; its caps come from BuildAudioSinkCaps() below.
  mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! "
      "appsink name=audiosink sync=false max-buffers=1", TRUE, nullptr);
  mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink),
      "audiosink"));
  GstCaps* caps = BuildAudioSinkCaps();
  g_object_set(mAudioAppSink, "caps", caps, nullptr);
  gst_caps_unref(caps);

  gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks,
      (gpointer) this, nullptr);
  gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks,
      (gpointer) this, nullptr);
  InstallPadCallbacks();

  g_object_set(mPlayBin, "uri", "appsrc://",
               "video-sink", mVideoSink,
               "audio-sink", mAudioSink,
               nullptr);

  // "notify::source" fires when playbin instantiates the appsrc, letting us
  // configure it in PlayBinSourceSetupCb.
  g_signal_connect(G_OBJECT(mPlayBin), "notify::source",
                   G_CALLBACK(GStreamerReader::PlayBinSourceSetupCb), this);
  g_signal_connect(G_OBJECT(mPlayBin), "element-added",
                   G_CALLBACK(GStreamerReader::PlayElementAddedCb), this);

  return NS_OK;
}
michael@0 | 199 | |
michael@0 | 200 | GstBusSyncReply |
michael@0 | 201 | GStreamerReader::ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData) |
michael@0 | 202 | { |
michael@0 | 203 | return static_cast<GStreamerReader*>(aUserData)->Error(aBus, aMessage); |
michael@0 | 204 | } |
michael@0 | 205 | |
michael@0 | 206 | GstBusSyncReply |
michael@0 | 207 | GStreamerReader::Error(GstBus *aBus, GstMessage *aMessage) |
michael@0 | 208 | { |
michael@0 | 209 | if (GST_MESSAGE_TYPE(aMessage) == GST_MESSAGE_ERROR) { |
michael@0 | 210 | Eos(); |
michael@0 | 211 | } |
michael@0 | 212 | |
michael@0 | 213 | return GST_BUS_PASS; |
michael@0 | 214 | } |
michael@0 | 215 | |
michael@0 | 216 | void GStreamerReader::PlayBinSourceSetupCb(GstElement* aPlayBin, |
michael@0 | 217 | GParamSpec* pspec, |
michael@0 | 218 | gpointer aUserData) |
michael@0 | 219 | { |
michael@0 | 220 | GstElement *source; |
michael@0 | 221 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 222 | |
michael@0 | 223 | g_object_get(aPlayBin, "source", &source, nullptr); |
michael@0 | 224 | reader->PlayBinSourceSetup(GST_APP_SRC(source)); |
michael@0 | 225 | } |
michael@0 | 226 | |
// Configure the appsrc that feeds media data into the pipeline: install the
// need/enough/seek callbacks, pick pull (random access) vs push (seekable)
// streaming mode based on how much data is locally available, and constrain
// the caps to the resource's MIME type. Takes ownership of aSource's ref.
void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
{
  mSource = GST_APP_SRC(aSource);
  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, nullptr);
  MediaResource* resource = mDecoder->GetResource();

  /* do a short read to trigger a network request so that GetLength() below
   * returns something meaningful and not -1
   */
  // NOTE(review): the Read() result (and any error) is deliberately ignored;
  // the read exists only for its side effect of starting the channel.
  char buf[512];
  unsigned int size = 0;
  resource->Read(buf, sizeof(buf), &size);
  resource->Seek(SEEK_SET, 0);

  /* now we should have a length */
  int64_t resourceLength = GetDataLength();
  gst_app_src_set_size(mSource, resourceLength);
  if (resource->IsDataCachedToEndOfResource(0) ||
      (resourceLength != -1 && resourceLength <= SHORT_FILE_SIZE)) {
    /* let the demuxer work in pull mode for local files (or very short files)
     * so that we get optimal seeking accuracy/performance
     */
    LOG(PR_LOG_DEBUG, "configuring random access, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
  } else {
    /* make the demuxer work in push mode so that seeking is kept to a minimum
     */
    LOG(PR_LOG_DEBUG, "configuring push mode, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
  }

  // Set the source MIME type to stop typefind trying every. single. format.
  GstCaps *caps =
    GStreamerFormatHelper::ConvertFormatsToCaps(mDecoder->GetResource()->GetContentType().get(),
                                                nullptr);

  gst_app_src_set_caps(aSource, caps);
  gst_caps_unref(caps);
}
michael@0 | 266 | |
/**
 * If this stream is an MP3, we want to parse the headers to estimate the
 * stream duration.
 */
nsresult GStreamerReader::ParseMP3Headers()
{
  MediaResource *resource = mDecoder->GetResource();

  // Chunk size for each ReadAt() pass over the head of the stream.
  const uint32_t MAX_READ_BYTES = 4096;

  uint64_t offset = 0;
  char bytes[MAX_READ_BYTES];
  uint32_t bytesRead;
  do {
    nsresult rv = resource->ReadAt(offset, bytes, MAX_READ_BYTES, &bytesRead);
    NS_ENSURE_SUCCESS(rv, rv);
    // A zero-byte read means EOF; bail out so the loop can't spin forever if
    // the parser never finds complete headers.
    NS_ENSURE_TRUE(bytesRead, NS_ERROR_FAILURE);

    mMP3FrameParser.Parse(bytes, bytesRead, offset);
    offset += bytesRead;
  } while (!mMP3FrameParser.ParsedHeaders());

  if (mMP3FrameParser.IsMP3()) {
    mLastParserDuration = mMP3FrameParser.GetDuration();
    // Offset of the first MP3 frame; data before it (e.g. ID3 tags) is
    // excluded from the length reported to GStreamer via GetDataLength().
    mDataOffset = mMP3FrameParser.GetMP3Offset();

    // Update GStreamer's stream length in case we found any ID3 headers to
    // ignore.
    gst_app_src_set_size(mSource, GetDataLength());
  }

  return NS_OK;
}
michael@0 | 300 | |
michael@0 | 301 | int64_t |
michael@0 | 302 | GStreamerReader::GetDataLength() |
michael@0 | 303 | { |
michael@0 | 304 | int64_t streamLen = mDecoder->GetResource()->GetLength(); |
michael@0 | 305 | |
michael@0 | 306 | if (streamLen < 0) { |
michael@0 | 307 | return streamLen; |
michael@0 | 308 | } |
michael@0 | 309 | |
michael@0 | 310 | return streamLen - mDataOffset; |
michael@0 | 311 | } |
michael@0 | 312 | |
// Preroll the pipeline to discover the stream's metadata: duration and which
// of audio/video are present. Tries up to three playbin flag configurations
// (A+V, video only, audio only) so that a single broken stream doesn't make
// the whole resource unplayable. On success the pipeline is left PLAYING so
// the appsinks start filling; aInfo/aTags receive the results (tags are not
// extracted: *aTags is always null).
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    // Undo any "skip" caps a previous attempt may have installed on either
    // sink's capsfilter before configuring this attempt.
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    // Pick the sink whose stream this attempt disables, if any.
    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    // Blocks indefinitely (GST_CLOCK_TIME_NONE), so message is never null.
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
    // The 0.10 query API takes an in/out format argument; 1.0 takes it by
    // value. Both branches leave `duration` in GStreamer time (ns).
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    } else {
      // No queryable duration: treat the media as unseekable.
      mDecoder->SetMediaSeekable(false);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}
michael@0 | 467 | |
michael@0 | 468 | nsresult GStreamerReader::CheckSupportedFormats() |
michael@0 | 469 | { |
michael@0 | 470 | bool done = false; |
michael@0 | 471 | bool unsupported = false; |
michael@0 | 472 | |
michael@0 | 473 | GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin)); |
michael@0 | 474 | while (!done) { |
michael@0 | 475 | GstIteratorResult res; |
michael@0 | 476 | GstElement* element; |
michael@0 | 477 | |
michael@0 | 478 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 479 | GValue value = {0,}; |
michael@0 | 480 | res = gst_iterator_next(it, &value); |
michael@0 | 481 | #else |
michael@0 | 482 | res = gst_iterator_next(it, (void **) &element); |
michael@0 | 483 | #endif |
michael@0 | 484 | switch(res) { |
michael@0 | 485 | case GST_ITERATOR_OK: |
michael@0 | 486 | { |
michael@0 | 487 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 488 | element = GST_ELEMENT (g_value_get_object (&value)); |
michael@0 | 489 | #endif |
michael@0 | 490 | GstElementFactory* factory = gst_element_get_factory(element); |
michael@0 | 491 | if (factory) { |
michael@0 | 492 | const char* klass = gst_element_factory_get_klass(factory); |
michael@0 | 493 | GstPad* pad = gst_element_get_static_pad(element, "sink"); |
michael@0 | 494 | if (pad) { |
michael@0 | 495 | GstCaps* caps; |
michael@0 | 496 | |
michael@0 | 497 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 498 | caps = gst_pad_get_current_caps(pad); |
michael@0 | 499 | #else |
michael@0 | 500 | caps = gst_pad_get_negotiated_caps(pad); |
michael@0 | 501 | #endif |
michael@0 | 502 | |
michael@0 | 503 | if (caps) { |
michael@0 | 504 | /* check for demuxers but ignore elements like id3demux */ |
michael@0 | 505 | if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata")) |
michael@0 | 506 | unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps); |
michael@0 | 507 | else if (strstr (klass, "Decoder") && !strstr(klass, "Generic")) |
michael@0 | 508 | unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps); |
michael@0 | 509 | |
michael@0 | 510 | gst_caps_unref(caps); |
michael@0 | 511 | } |
michael@0 | 512 | gst_object_unref(pad); |
michael@0 | 513 | } |
michael@0 | 514 | } |
michael@0 | 515 | |
michael@0 | 516 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 517 | g_value_unset (&value); |
michael@0 | 518 | #else |
michael@0 | 519 | gst_object_unref(element); |
michael@0 | 520 | #endif |
michael@0 | 521 | done = unsupported; |
michael@0 | 522 | break; |
michael@0 | 523 | } |
michael@0 | 524 | case GST_ITERATOR_RESYNC: |
michael@0 | 525 | unsupported = false; |
michael@0 | 526 | done = false; |
michael@0 | 527 | break; |
michael@0 | 528 | case GST_ITERATOR_ERROR: |
michael@0 | 529 | done = true; |
michael@0 | 530 | break; |
michael@0 | 531 | case GST_ITERATOR_DONE: |
michael@0 | 532 | done = true; |
michael@0 | 533 | break; |
michael@0 | 534 | } |
michael@0 | 535 | } |
michael@0 | 536 | |
michael@0 | 537 | return unsupported ? NS_ERROR_FAILURE : NS_OK; |
michael@0 | 538 | } |
michael@0 | 539 | |
michael@0 | 540 | nsresult GStreamerReader::ResetDecode() |
michael@0 | 541 | { |
michael@0 | 542 | nsresult res = NS_OK; |
michael@0 | 543 | |
michael@0 | 544 | LOG(PR_LOG_DEBUG, "reset decode"); |
michael@0 | 545 | |
michael@0 | 546 | if (NS_FAILED(MediaDecoderReader::ResetDecode())) { |
michael@0 | 547 | res = NS_ERROR_FAILURE; |
michael@0 | 548 | } |
michael@0 | 549 | |
michael@0 | 550 | mVideoQueue.Reset(); |
michael@0 | 551 | mAudioQueue.Reset(); |
michael@0 | 552 | |
michael@0 | 553 | mVideoSinkBufferCount = 0; |
michael@0 | 554 | mAudioSinkBufferCount = 0; |
michael@0 | 555 | mReachedAudioEos = false; |
michael@0 | 556 | mReachedVideoEos = false; |
michael@0 | 557 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 558 | mConfigureAlignment = true; |
michael@0 | 559 | #endif |
michael@0 | 560 | |
michael@0 | 561 | LOG(PR_LOG_DEBUG, "reset decode done"); |
michael@0 | 562 | |
michael@0 | 563 | return res; |
michael@0 | 564 | } |
michael@0 | 565 | |
// Pull one decoded audio buffer from the audio appsink and push its samples
// into mAudioCompactor. May block (on mGstThreadsMonitor) until either audio
// or video data is available. Returns false only when audio reached EOS and
// the sink is drained; true otherwise (including "no audio yet, let the
// state machine process whatever did arrive").
bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
         */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    // In 1.0 the appsink hands out a GstSample wrapping the buffer; take our
    // own ref on the buffer so we can drop the sample immediately.
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  // Convert the buffer's pipeline timestamp to stream time, then to usecs.
  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);

  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  // Assumes interleaved samples of type AudioDataValue — presumably enforced
  // by the caps from BuildAudioSinkCaps(); confirm against that helper.
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
michael@0 | 648 | |
michael@0 | 649 | bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip, |
michael@0 | 650 | int64_t aTimeThreshold) |
michael@0 | 651 | { |
michael@0 | 652 | NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); |
michael@0 | 653 | |
michael@0 | 654 | GstBuffer *buffer = nullptr; |
michael@0 | 655 | |
michael@0 | 656 | { |
michael@0 | 657 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 658 | |
michael@0 | 659 | if (mReachedVideoEos && !mVideoSinkBufferCount) { |
michael@0 | 660 | return false; |
michael@0 | 661 | } |
michael@0 | 662 | |
michael@0 | 663 | /* Wait something to be decoded before return or continue */ |
michael@0 | 664 | if (!mVideoSinkBufferCount) { |
michael@0 | 665 | if (!mAudioSinkBufferCount) { |
michael@0 | 666 | /* We have nothing decoded so it makes no sense to return to the state machine |
michael@0 | 667 | * as it will call us back immediately, we'll return again and so on, wasting |
michael@0 | 668 | * CPU cycles for no job done. So, block here until there is either video or |
michael@0 | 669 | * audio data available |
michael@0 | 670 | */ |
michael@0 | 671 | mon.Wait(); |
michael@0 | 672 | if (!mVideoSinkBufferCount) { |
michael@0 | 673 | /* There is still no video data available, so either there is audio data or |
michael@0 | 674 | * something else has happened (Eos, etc...). Return to the state machine |
michael@0 | 675 | * to process it |
michael@0 | 676 | */ |
michael@0 | 677 | return true; |
michael@0 | 678 | } |
michael@0 | 679 | } |
michael@0 | 680 | else { |
michael@0 | 681 | return true; |
michael@0 | 682 | } |
michael@0 | 683 | } |
michael@0 | 684 | |
michael@0 | 685 | mDecoder->NotifyDecodedFrames(0, 1); |
michael@0 | 686 | |
michael@0 | 687 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 688 | GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink); |
michael@0 | 689 | buffer = gst_buffer_ref(gst_sample_get_buffer(sample)); |
michael@0 | 690 | gst_sample_unref(sample); |
michael@0 | 691 | #else |
michael@0 | 692 | buffer = gst_app_sink_pull_buffer(mVideoAppSink); |
michael@0 | 693 | #endif |
michael@0 | 694 | mVideoSinkBufferCount--; |
michael@0 | 695 | } |
michael@0 | 696 | |
michael@0 | 697 | bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT); |
michael@0 | 698 | if ((aKeyFrameSkip && !isKeyframe)) { |
michael@0 | 699 | gst_buffer_unref(buffer); |
michael@0 | 700 | return true; |
michael@0 | 701 | } |
michael@0 | 702 | |
michael@0 | 703 | int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer); |
michael@0 | 704 | { |
michael@0 | 705 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 706 | timestamp = gst_segment_to_stream_time(&mVideoSegment, |
michael@0 | 707 | GST_FORMAT_TIME, timestamp); |
michael@0 | 708 | } |
michael@0 | 709 | NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp), |
michael@0 | 710 | "frame has invalid timestamp"); |
michael@0 | 711 | |
michael@0 | 712 | timestamp = GST_TIME_AS_USECONDS(timestamp); |
michael@0 | 713 | int64_t duration = 0; |
michael@0 | 714 | if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer))) |
michael@0 | 715 | duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer)); |
michael@0 | 716 | else if (fpsNum && fpsDen) |
michael@0 | 717 | /* add 1-frame duration */ |
michael@0 | 718 | duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum); |
michael@0 | 719 | |
michael@0 | 720 | if (timestamp < aTimeThreshold) { |
michael@0 | 721 | LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT |
michael@0 | 722 | " threshold %" GST_TIME_FORMAT, |
michael@0 | 723 | GST_TIME_ARGS(timestamp * 1000), |
michael@0 | 724 | GST_TIME_ARGS(aTimeThreshold * 1000)); |
michael@0 | 725 | gst_buffer_unref(buffer); |
michael@0 | 726 | return true; |
michael@0 | 727 | } |
michael@0 | 728 | |
michael@0 | 729 | if (!buffer) |
michael@0 | 730 | /* no more frames */ |
michael@0 | 731 | return true; |
michael@0 | 732 | |
michael@0 | 733 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 734 | if (mConfigureAlignment && buffer->pool) { |
michael@0 | 735 | GstStructure *config = gst_buffer_pool_get_config(buffer->pool); |
michael@0 | 736 | GstVideoAlignment align; |
michael@0 | 737 | if (gst_buffer_pool_config_get_video_alignment(config, &align)) |
michael@0 | 738 | gst_video_info_align(&mVideoInfo, &align); |
michael@0 | 739 | gst_structure_free(config); |
michael@0 | 740 | mConfigureAlignment = false; |
michael@0 | 741 | } |
michael@0 | 742 | #endif |
michael@0 | 743 | |
michael@0 | 744 | nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer); |
michael@0 | 745 | if (!image) { |
michael@0 | 746 | /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to |
michael@0 | 747 | * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy. |
michael@0 | 748 | */ |
michael@0 | 749 | GstBuffer* tmp = nullptr; |
michael@0 | 750 | CopyIntoImageBuffer(buffer, &tmp, image); |
michael@0 | 751 | gst_buffer_unref(buffer); |
michael@0 | 752 | buffer = tmp; |
michael@0 | 753 | } |
michael@0 | 754 | |
michael@0 | 755 | int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media. |
michael@0 | 756 | VideoData* video = VideoData::CreateFromImage(mInfo.mVideo, |
michael@0 | 757 | mDecoder->GetImageContainer(), |
michael@0 | 758 | offset, timestamp, duration, |
michael@0 | 759 | static_cast<Image*>(image.get()), |
michael@0 | 760 | isKeyframe, -1, mPicture); |
michael@0 | 761 | mVideoQueue.Push(video); |
michael@0 | 762 | |
michael@0 | 763 | gst_buffer_unref(buffer); |
michael@0 | 764 | |
michael@0 | 765 | return true; |
michael@0 | 766 | } |
michael@0 | 767 | |
michael@0 | 768 | nsresult GStreamerReader::Seek(int64_t aTarget, |
michael@0 | 769 | int64_t aStartTime, |
michael@0 | 770 | int64_t aEndTime, |
michael@0 | 771 | int64_t aCurrentTime) |
michael@0 | 772 | { |
michael@0 | 773 | NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); |
michael@0 | 774 | |
michael@0 | 775 | gint64 seekPos = aTarget * GST_USECOND; |
michael@0 | 776 | LOG(PR_LOG_DEBUG, "%p About to seek to %" GST_TIME_FORMAT, |
michael@0 | 777 | mDecoder, GST_TIME_ARGS(seekPos)); |
michael@0 | 778 | |
michael@0 | 779 | int flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT; |
michael@0 | 780 | if (!gst_element_seek_simple(mPlayBin, |
michael@0 | 781 | GST_FORMAT_TIME, |
michael@0 | 782 | static_cast<GstSeekFlags>(flags), |
michael@0 | 783 | seekPos)) { |
michael@0 | 784 | LOG(PR_LOG_ERROR, "seek failed"); |
michael@0 | 785 | return NS_ERROR_FAILURE; |
michael@0 | 786 | } |
michael@0 | 787 | LOG(PR_LOG_DEBUG, "seek succeeded"); |
michael@0 | 788 | GstMessage* message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE, |
michael@0 | 789 | (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR)); |
michael@0 | 790 | gst_message_unref(message); |
michael@0 | 791 | LOG(PR_LOG_DEBUG, "seek completed"); |
michael@0 | 792 | |
michael@0 | 793 | return NS_OK; |
michael@0 | 794 | } |
michael@0 | 795 | |
michael@0 | 796 | nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered, |
michael@0 | 797 | int64_t aStartTime) |
michael@0 | 798 | { |
michael@0 | 799 | if (!mInfo.HasValidMedia()) { |
michael@0 | 800 | return NS_OK; |
michael@0 | 801 | } |
michael@0 | 802 | |
michael@0 | 803 | #if GST_VERSION_MAJOR == 0 |
michael@0 | 804 | GstFormat format = GST_FORMAT_TIME; |
michael@0 | 805 | #endif |
michael@0 | 806 | MediaResource* resource = mDecoder->GetResource(); |
michael@0 | 807 | nsTArray<MediaByteRange> ranges; |
michael@0 | 808 | resource->GetCachedRanges(ranges); |
michael@0 | 809 | |
michael@0 | 810 | if (resource->IsDataCachedToEndOfResource(0)) { |
michael@0 | 811 | /* fast path for local or completely cached files */ |
michael@0 | 812 | gint64 duration = 0; |
michael@0 | 813 | |
michael@0 | 814 | { |
michael@0 | 815 | ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); |
michael@0 | 816 | duration = mDecoder->GetMediaDuration(); |
michael@0 | 817 | } |
michael@0 | 818 | |
michael@0 | 819 | double end = (double) duration / GST_MSECOND; |
michael@0 | 820 | LOG(PR_LOG_DEBUG, "complete range [0, %f] for [0, %li]", |
michael@0 | 821 | end, GetDataLength()); |
michael@0 | 822 | aBuffered->Add(0, end); |
michael@0 | 823 | return NS_OK; |
michael@0 | 824 | } |
michael@0 | 825 | |
michael@0 | 826 | for(uint32_t index = 0; index < ranges.Length(); index++) { |
michael@0 | 827 | int64_t startOffset = ranges[index].mStart; |
michael@0 | 828 | int64_t endOffset = ranges[index].mEnd; |
michael@0 | 829 | gint64 startTime, endTime; |
michael@0 | 830 | |
michael@0 | 831 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 832 | if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
michael@0 | 833 | startOffset, GST_FORMAT_TIME, &startTime)) |
michael@0 | 834 | continue; |
michael@0 | 835 | if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
michael@0 | 836 | endOffset, GST_FORMAT_TIME, &endTime)) |
michael@0 | 837 | continue; |
michael@0 | 838 | #else |
michael@0 | 839 | if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
michael@0 | 840 | startOffset, &format, &startTime) || format != GST_FORMAT_TIME) |
michael@0 | 841 | continue; |
michael@0 | 842 | if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
michael@0 | 843 | endOffset, &format, &endTime) || format != GST_FORMAT_TIME) |
michael@0 | 844 | continue; |
michael@0 | 845 | #endif |
michael@0 | 846 | |
michael@0 | 847 | double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND; |
michael@0 | 848 | double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND; |
michael@0 | 849 | LOG(PR_LOG_DEBUG, "adding range [%f, %f] for [%li %li] size %li", |
michael@0 | 850 | start, end, startOffset, endOffset, GetDataLength()); |
michael@0 | 851 | aBuffered->Add(start, end); |
michael@0 | 852 | } |
michael@0 | 853 | |
michael@0 | 854 | return NS_OK; |
michael@0 | 855 | } |
michael@0 | 856 | |
michael@0 | 857 | void GStreamerReader::ReadAndPushData(guint aLength) |
michael@0 | 858 | { |
michael@0 | 859 | MediaResource* resource = mDecoder->GetResource(); |
michael@0 | 860 | NS_ASSERTION(resource, "Decoder has no media resource"); |
michael@0 | 861 | int64_t offset1 = resource->Tell(); |
michael@0 | 862 | unused << offset1; |
michael@0 | 863 | nsresult rv = NS_OK; |
michael@0 | 864 | |
michael@0 | 865 | GstBuffer* buffer = gst_buffer_new_and_alloc(aLength); |
michael@0 | 866 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 867 | GstMapInfo info; |
michael@0 | 868 | gst_buffer_map(buffer, &info, GST_MAP_WRITE); |
michael@0 | 869 | guint8 *data = info.data; |
michael@0 | 870 | #else |
michael@0 | 871 | guint8* data = GST_BUFFER_DATA(buffer); |
michael@0 | 872 | #endif |
michael@0 | 873 | uint32_t size = 0, bytesRead = 0; |
michael@0 | 874 | while(bytesRead < aLength) { |
michael@0 | 875 | rv = resource->Read(reinterpret_cast<char*>(data + bytesRead), |
michael@0 | 876 | aLength - bytesRead, &size); |
michael@0 | 877 | if (NS_FAILED(rv) || size == 0) |
michael@0 | 878 | break; |
michael@0 | 879 | |
michael@0 | 880 | bytesRead += size; |
michael@0 | 881 | } |
michael@0 | 882 | |
michael@0 | 883 | int64_t offset2 = resource->Tell(); |
michael@0 | 884 | unused << offset2; |
michael@0 | 885 | |
michael@0 | 886 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 887 | gst_buffer_unmap(buffer, &info); |
michael@0 | 888 | gst_buffer_set_size(buffer, bytesRead); |
michael@0 | 889 | #else |
michael@0 | 890 | GST_BUFFER_SIZE(buffer) = bytesRead; |
michael@0 | 891 | #endif |
michael@0 | 892 | |
michael@0 | 893 | GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer)); |
michael@0 | 894 | if (ret != GST_FLOW_OK) { |
michael@0 | 895 | LOG(PR_LOG_ERROR, "ReadAndPushData push ret %s(%d)", gst_flow_get_name(ret), ret); |
michael@0 | 896 | } |
michael@0 | 897 | |
michael@0 | 898 | if (NS_FAILED(rv)) { |
michael@0 | 899 | /* Terminate the stream if there is an error in reading */ |
michael@0 | 900 | LOG(PR_LOG_ERROR, "ReadAndPushData read error, rv=%x", rv); |
michael@0 | 901 | gst_app_src_end_of_stream(mSource); |
michael@0 | 902 | } else if (bytesRead < aLength) { |
michael@0 | 903 | /* If we read less than what we wanted, we reached the end */ |
michael@0 | 904 | LOG(PR_LOG_WARNING, "ReadAndPushData read underflow, " |
michael@0 | 905 | "bytesRead=%u, aLength=%u, offset(%lld,%lld)", |
michael@0 | 906 | bytesRead, aLength, offset1, offset2); |
michael@0 | 907 | gst_app_src_end_of_stream(mSource); |
michael@0 | 908 | } |
michael@0 | 909 | |
michael@0 | 910 | gst_buffer_unref(buffer); |
michael@0 | 911 | |
michael@0 | 912 | /* Ensure offset change is consistent in this function. |
michael@0 | 913 | * If there are other stream operations on another thread at the same time, |
michael@0 | 914 | * it will disturb the GStreamer state machine. |
michael@0 | 915 | */ |
michael@0 | 916 | MOZ_ASSERT(offset1 + bytesRead == offset2); |
michael@0 | 917 | } |
michael@0 | 918 | |
michael@0 | 919 | void GStreamerReader::NeedDataCb(GstAppSrc* aSrc, |
michael@0 | 920 | guint aLength, |
michael@0 | 921 | gpointer aUserData) |
michael@0 | 922 | { |
michael@0 | 923 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 924 | reader->NeedData(aSrc, aLength); |
michael@0 | 925 | } |
michael@0 | 926 | |
michael@0 | 927 | void GStreamerReader::NeedData(GstAppSrc* aSrc, guint aLength) |
michael@0 | 928 | { |
michael@0 | 929 | if (aLength == static_cast<guint>(-1)) |
michael@0 | 930 | aLength = DEFAULT_SOURCE_READ_SIZE; |
michael@0 | 931 | ReadAndPushData(aLength); |
michael@0 | 932 | } |
michael@0 | 933 | |
michael@0 | 934 | void GStreamerReader::EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData) |
michael@0 | 935 | { |
michael@0 | 936 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 937 | reader->EnoughData(aSrc); |
michael@0 | 938 | } |
michael@0 | 939 | |
michael@0 | 940 | void GStreamerReader::EnoughData(GstAppSrc* aSrc) |
michael@0 | 941 | { |
michael@0 | 942 | } |
michael@0 | 943 | |
michael@0 | 944 | gboolean GStreamerReader::SeekDataCb(GstAppSrc* aSrc, |
michael@0 | 945 | guint64 aOffset, |
michael@0 | 946 | gpointer aUserData) |
michael@0 | 947 | { |
michael@0 | 948 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 949 | return reader->SeekData(aSrc, aOffset); |
michael@0 | 950 | } |
michael@0 | 951 | |
michael@0 | 952 | gboolean GStreamerReader::SeekData(GstAppSrc* aSrc, guint64 aOffset) |
michael@0 | 953 | { |
michael@0 | 954 | aOffset += mDataOffset; |
michael@0 | 955 | |
michael@0 | 956 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 957 | MediaResource* resource = mDecoder->GetResource(); |
michael@0 | 958 | int64_t resourceLength = resource->GetLength(); |
michael@0 | 959 | |
michael@0 | 960 | if (gst_app_src_get_size(mSource) == -1) { |
michael@0 | 961 | /* It's possible that we didn't know the length when we initialized mSource |
michael@0 | 962 | * but maybe we do now |
michael@0 | 963 | */ |
michael@0 | 964 | gst_app_src_set_size(mSource, GetDataLength()); |
michael@0 | 965 | } |
michael@0 | 966 | |
michael@0 | 967 | nsresult rv = NS_ERROR_FAILURE; |
michael@0 | 968 | if (aOffset < static_cast<guint64>(resourceLength)) { |
michael@0 | 969 | rv = resource->Seek(SEEK_SET, aOffset); |
michael@0 | 970 | } |
michael@0 | 971 | |
michael@0 | 972 | if (NS_FAILED(rv)) { |
michael@0 | 973 | LOG(PR_LOG_ERROR, "seek at %lu failed", aOffset); |
michael@0 | 974 | } else { |
michael@0 | 975 | MOZ_ASSERT(aOffset == static_cast<guint64>(resource->Tell())); |
michael@0 | 976 | } |
michael@0 | 977 | |
michael@0 | 978 | return NS_SUCCEEDED(rv); |
michael@0 | 979 | } |
michael@0 | 980 | |
michael@0 | 981 | GstFlowReturn GStreamerReader::NewPrerollCb(GstAppSink* aSink, |
michael@0 | 982 | gpointer aUserData) |
michael@0 | 983 | { |
michael@0 | 984 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 985 | |
michael@0 | 986 | if (aSink == reader->mVideoAppSink) |
michael@0 | 987 | reader->VideoPreroll(); |
michael@0 | 988 | else |
michael@0 | 989 | reader->AudioPreroll(); |
michael@0 | 990 | return GST_FLOW_OK; |
michael@0 | 991 | } |
michael@0 | 992 | |
michael@0 | 993 | void GStreamerReader::AudioPreroll() |
michael@0 | 994 | { |
michael@0 | 995 | /* The first audio buffer has reached the audio sink. Get rate and channels */ |
michael@0 | 996 | LOG(PR_LOG_DEBUG, "Audio preroll"); |
michael@0 | 997 | GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink"); |
michael@0 | 998 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 999 | GstCaps *caps = gst_pad_get_current_caps(sinkpad); |
michael@0 | 1000 | #else |
michael@0 | 1001 | GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad); |
michael@0 | 1002 | #endif |
michael@0 | 1003 | GstStructure* s = gst_caps_get_structure(caps, 0); |
michael@0 | 1004 | mInfo.mAudio.mRate = mInfo.mAudio.mChannels = 0; |
michael@0 | 1005 | gst_structure_get_int(s, "rate", (gint*) &mInfo.mAudio.mRate); |
michael@0 | 1006 | gst_structure_get_int(s, "channels", (gint*) &mInfo.mAudio.mChannels); |
michael@0 | 1007 | NS_ASSERTION(mInfo.mAudio.mRate != 0, ("audio rate is zero")); |
michael@0 | 1008 | NS_ASSERTION(mInfo.mAudio.mChannels != 0, ("audio channels is zero")); |
michael@0 | 1009 | NS_ASSERTION(mInfo.mAudio.mChannels > 0 && mInfo.mAudio.mChannels <= MAX_CHANNELS, |
michael@0 | 1010 | "invalid audio channels number"); |
michael@0 | 1011 | mInfo.mAudio.mHasAudio = true; |
michael@0 | 1012 | gst_caps_unref(caps); |
michael@0 | 1013 | gst_object_unref(sinkpad); |
michael@0 | 1014 | } |
michael@0 | 1015 | |
michael@0 | 1016 | void GStreamerReader::VideoPreroll() |
michael@0 | 1017 | { |
michael@0 | 1018 | /* The first video buffer has reached the video sink. Get width and height */ |
michael@0 | 1019 | LOG(PR_LOG_DEBUG, "Video preroll"); |
michael@0 | 1020 | GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink"); |
michael@0 | 1021 | int PARNumerator, PARDenominator; |
michael@0 | 1022 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 1023 | GstCaps* caps = gst_pad_get_current_caps(sinkpad); |
michael@0 | 1024 | memset (&mVideoInfo, 0, sizeof (mVideoInfo)); |
michael@0 | 1025 | gst_video_info_from_caps(&mVideoInfo, caps); |
michael@0 | 1026 | mFormat = mVideoInfo.finfo->format; |
michael@0 | 1027 | mPicture.width = mVideoInfo.width; |
michael@0 | 1028 | mPicture.height = mVideoInfo.height; |
michael@0 | 1029 | PARNumerator = GST_VIDEO_INFO_PAR_N(&mVideoInfo); |
michael@0 | 1030 | PARDenominator = GST_VIDEO_INFO_PAR_D(&mVideoInfo); |
michael@0 | 1031 | #else |
michael@0 | 1032 | GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad); |
michael@0 | 1033 | gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height); |
michael@0 | 1034 | if (!gst_video_parse_caps_pixel_aspect_ratio(caps, &PARNumerator, &PARDenominator)) { |
michael@0 | 1035 | PARNumerator = 1; |
michael@0 | 1036 | PARDenominator = 1; |
michael@0 | 1037 | } |
michael@0 | 1038 | #endif |
michael@0 | 1039 | NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution"); |
michael@0 | 1040 | |
michael@0 | 1041 | // Calculate display size according to pixel aspect ratio. |
michael@0 | 1042 | nsIntRect pictureRect(0, 0, mPicture.width, mPicture.height); |
michael@0 | 1043 | nsIntSize frameSize = nsIntSize(mPicture.width, mPicture.height); |
michael@0 | 1044 | nsIntSize displaySize = nsIntSize(mPicture.width, mPicture.height); |
michael@0 | 1045 | ScaleDisplayByAspectRatio(displaySize, float(PARNumerator) / float(PARDenominator)); |
michael@0 | 1046 | |
michael@0 | 1047 | // If video frame size is overflow, stop playing. |
michael@0 | 1048 | if (IsValidVideoRegion(frameSize, pictureRect, displaySize)) { |
michael@0 | 1049 | GstStructure* structure = gst_caps_get_structure(caps, 0); |
michael@0 | 1050 | gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen); |
michael@0 | 1051 | mInfo.mVideo.mDisplay = ThebesIntSize(displaySize.ToIntSize()); |
michael@0 | 1052 | mInfo.mVideo.mHasVideo = true; |
michael@0 | 1053 | } else { |
michael@0 | 1054 | LOG(PR_LOG_DEBUG, "invalid video region"); |
michael@0 | 1055 | Eos(); |
michael@0 | 1056 | } |
michael@0 | 1057 | gst_caps_unref(caps); |
michael@0 | 1058 | gst_object_unref(sinkpad); |
michael@0 | 1059 | } |
michael@0 | 1060 | |
michael@0 | 1061 | GstFlowReturn GStreamerReader::NewBufferCb(GstAppSink* aSink, |
michael@0 | 1062 | gpointer aUserData) |
michael@0 | 1063 | { |
michael@0 | 1064 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 1065 | |
michael@0 | 1066 | if (aSink == reader->mVideoAppSink) |
michael@0 | 1067 | reader->NewVideoBuffer(); |
michael@0 | 1068 | else |
michael@0 | 1069 | reader->NewAudioBuffer(); |
michael@0 | 1070 | |
michael@0 | 1071 | return GST_FLOW_OK; |
michael@0 | 1072 | } |
michael@0 | 1073 | |
michael@0 | 1074 | void GStreamerReader::NewVideoBuffer() |
michael@0 | 1075 | { |
michael@0 | 1076 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 1077 | /* We have a new video buffer queued in the video sink. Increment the counter |
michael@0 | 1078 | * and notify the decode thread potentially blocked in DecodeVideoFrame |
michael@0 | 1079 | */ |
michael@0 | 1080 | |
michael@0 | 1081 | mDecoder->NotifyDecodedFrames(1, 0); |
michael@0 | 1082 | mVideoSinkBufferCount++; |
michael@0 | 1083 | mon.NotifyAll(); |
michael@0 | 1084 | } |
michael@0 | 1085 | |
michael@0 | 1086 | void GStreamerReader::NewAudioBuffer() |
michael@0 | 1087 | { |
michael@0 | 1088 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 1089 | /* We have a new audio buffer queued in the audio sink. Increment the counter |
michael@0 | 1090 | * and notify the decode thread potentially blocked in DecodeAudioData |
michael@0 | 1091 | */ |
michael@0 | 1092 | mAudioSinkBufferCount++; |
michael@0 | 1093 | mon.NotifyAll(); |
michael@0 | 1094 | } |
michael@0 | 1095 | |
michael@0 | 1096 | void GStreamerReader::EosCb(GstAppSink* aSink, gpointer aUserData) |
michael@0 | 1097 | { |
michael@0 | 1098 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData); |
michael@0 | 1099 | reader->Eos(aSink); |
michael@0 | 1100 | } |
michael@0 | 1101 | |
michael@0 | 1102 | void GStreamerReader::Eos(GstAppSink* aSink) |
michael@0 | 1103 | { |
michael@0 | 1104 | /* We reached the end of the stream */ |
michael@0 | 1105 | { |
michael@0 | 1106 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 1107 | /* Potentially unblock DecodeVideoFrame and DecodeAudioData */ |
michael@0 | 1108 | if (aSink == mVideoAppSink) { |
michael@0 | 1109 | mReachedVideoEos = true; |
michael@0 | 1110 | } else if (aSink == mAudioAppSink) { |
michael@0 | 1111 | mReachedAudioEos = true; |
michael@0 | 1112 | } else { |
michael@0 | 1113 | // Assume this is an error causing an EOS. |
michael@0 | 1114 | mReachedAudioEos = true; |
michael@0 | 1115 | mReachedVideoEos = true; |
michael@0 | 1116 | } |
michael@0 | 1117 | mon.NotifyAll(); |
michael@0 | 1118 | } |
michael@0 | 1119 | |
michael@0 | 1120 | { |
michael@0 | 1121 | ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); |
michael@0 | 1122 | /* Potentially unblock the decode thread in ::DecodeLoop */ |
michael@0 | 1123 | mon.NotifyAll(); |
michael@0 | 1124 | } |
michael@0 | 1125 | } |
michael@0 | 1126 | |
michael@0 | 1127 | /** |
michael@0 | 1128 | * This callback is called while the pipeline is automatically built, after a |
michael@0 | 1129 | * new element has been added to the pipeline. We use it to find the |
michael@0 | 1130 | * uridecodebin instance used by playbin and connect to it to apply our |
michael@0 | 1131 | * whitelist. |
michael@0 | 1132 | */ |
michael@0 | 1133 | void |
michael@0 | 1134 | GStreamerReader::PlayElementAddedCb(GstBin *aBin, GstElement *aElement, |
michael@0 | 1135 | gpointer *aUserData) |
michael@0 | 1136 | { |
michael@0 | 1137 | const static char sUriDecodeBinPrefix[] = "uridecodebin"; |
michael@0 | 1138 | gchar *name = gst_element_get_name(aElement); |
michael@0 | 1139 | |
michael@0 | 1140 | // Attach this callback to uridecodebin, child of playbin. |
michael@0 | 1141 | if (!strncmp(name, sUriDecodeBinPrefix, sizeof(sUriDecodeBinPrefix) - 1)) { |
michael@0 | 1142 | g_signal_connect(G_OBJECT(aElement), "autoplug-sort", |
michael@0 | 1143 | G_CALLBACK(GStreamerReader::AutoplugSortCb), aUserData); |
michael@0 | 1144 | } |
michael@0 | 1145 | |
michael@0 | 1146 | g_free(name); |
michael@0 | 1147 | } |
michael@0 | 1148 | |
michael@0 | 1149 | bool |
michael@0 | 1150 | GStreamerReader::ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps) |
michael@0 | 1151 | { |
michael@0 | 1152 | bool autoplug; |
michael@0 | 1153 | const gchar *klass = gst_element_factory_get_klass(aFactory); |
michael@0 | 1154 | if (strstr(klass, "Demuxer") && !strstr(klass, "Metadata")) { |
michael@0 | 1155 | autoplug = GStreamerFormatHelper::Instance()->CanHandleContainerCaps(aCaps); |
michael@0 | 1156 | } else if (strstr(klass, "Decoder") && !strstr(klass, "Generic")) { |
michael@0 | 1157 | autoplug = GStreamerFormatHelper::Instance()->CanHandleCodecCaps(aCaps); |
michael@0 | 1158 | } else { |
michael@0 | 1159 | /* we only filter demuxers and decoders, let everything else be autoplugged */ |
michael@0 | 1160 | autoplug = true; |
michael@0 | 1161 | } |
michael@0 | 1162 | |
michael@0 | 1163 | return autoplug; |
michael@0 | 1164 | } |
michael@0 | 1165 | |
michael@0 | 1166 | /** |
michael@0 | 1167 | * This is called by uridecodebin (running inside playbin), after it has found |
michael@0 | 1168 | * candidate factories to continue decoding the stream. We apply the whitelist |
michael@0 | 1169 | * here, allowing only demuxers and decoders that output the formats we want to |
michael@0 | 1170 | * support. |
michael@0 | 1171 | */ |
michael@0 | 1172 | GValueArray* |
michael@0 | 1173 | GStreamerReader::AutoplugSortCb(GstElement* aElement, GstPad* aPad, |
michael@0 | 1174 | GstCaps* aCaps, GValueArray* aFactories) |
michael@0 | 1175 | { |
michael@0 | 1176 | if (!aFactories->n_values) { |
michael@0 | 1177 | return nullptr; |
michael@0 | 1178 | } |
michael@0 | 1179 | |
michael@0 | 1180 | /* aFactories[0] is the element factory that is going to be used to |
michael@0 | 1181 | * create the next element needed to demux or decode the stream. |
michael@0 | 1182 | */ |
michael@0 | 1183 | GstElementFactory *factory = (GstElementFactory*) g_value_get_object(g_value_array_get_nth(aFactories, 0)); |
michael@0 | 1184 | if (!ShouldAutoplugFactory(factory, aCaps)) { |
michael@0 | 1185 | /* We don't support this factory. Return an empty array to signal that we |
michael@0 | 1186 | * don't want to continue decoding this (sub)stream. |
michael@0 | 1187 | */ |
michael@0 | 1188 | return g_value_array_new(0); |
michael@0 | 1189 | } |
michael@0 | 1190 | |
michael@0 | 1191 | /* nullptr means that we're ok with the candidates and don't need to apply any |
michael@0 | 1192 | * sorting/filtering. |
michael@0 | 1193 | */ |
michael@0 | 1194 | return nullptr; |
michael@0 | 1195 | } |
michael@0 | 1196 | |
michael@0 | 1197 | /** |
michael@0 | 1198 | * If this is an MP3 stream, pass any new data we get to the MP3 frame parser |
michael@0 | 1199 | * for duration estimation. |
michael@0 | 1200 | */ |
michael@0 | 1201 | void GStreamerReader::NotifyDataArrived(const char *aBuffer, |
michael@0 | 1202 | uint32_t aLength, |
michael@0 | 1203 | int64_t aOffset) |
michael@0 | 1204 | { |
michael@0 | 1205 | MOZ_ASSERT(NS_IsMainThread()); |
michael@0 | 1206 | |
michael@0 | 1207 | if (HasVideo()) { |
michael@0 | 1208 | return; |
michael@0 | 1209 | } |
michael@0 | 1210 | |
michael@0 | 1211 | if (!mMP3FrameParser.NeedsData()) { |
michael@0 | 1212 | return; |
michael@0 | 1213 | } |
michael@0 | 1214 | |
michael@0 | 1215 | mMP3FrameParser.Parse(aBuffer, aLength, aOffset); |
michael@0 | 1216 | |
michael@0 | 1217 | int64_t duration = mMP3FrameParser.GetDuration(); |
michael@0 | 1218 | if (duration != mLastParserDuration && mUseParserDuration) { |
michael@0 | 1219 | ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); |
michael@0 | 1220 | mLastParserDuration = duration; |
michael@0 | 1221 | mDecoder->UpdateEstimatedMediaDuration(mLastParserDuration); |
michael@0 | 1222 | } |
michael@0 | 1223 | } |
michael@0 | 1224 | |
michael@0 | 1225 | #if GST_VERSION_MAJOR >= 1 |
michael@0 | 1226 | GstCaps* GStreamerReader::BuildAudioSinkCaps() |
michael@0 | 1227 | { |
michael@0 | 1228 | GstCaps* caps = gst_caps_from_string("audio/x-raw, channels={1,2}"); |
michael@0 | 1229 | const char* format; |
michael@0 | 1230 | #ifdef MOZ_SAMPLE_TYPE_FLOAT32 |
michael@0 | 1231 | #if MOZ_LITTLE_ENDIAN |
michael@0 | 1232 | format = "F32LE"; |
michael@0 | 1233 | #else |
michael@0 | 1234 | format = "F32BE"; |
michael@0 | 1235 | #endif |
michael@0 | 1236 | #else /* !MOZ_SAMPLE_TYPE_FLOAT32 */ |
michael@0 | 1237 | #if MOZ_LITTLE_ENDIAN |
michael@0 | 1238 | format = "S16LE"; |
michael@0 | 1239 | #else |
michael@0 | 1240 | format = "S16BE"; |
michael@0 | 1241 | #endif |
michael@0 | 1242 | #endif |
michael@0 | 1243 | gst_caps_set_simple(caps, "format", G_TYPE_STRING, format, nullptr); |
michael@0 | 1244 | |
michael@0 | 1245 | return caps; |
michael@0 | 1246 | } |
michael@0 | 1247 | |
michael@0 | 1248 | void GStreamerReader::InstallPadCallbacks() |
michael@0 | 1249 | { |
michael@0 | 1250 | GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink"); |
michael@0 | 1251 | |
michael@0 | 1252 | gst_pad_add_probe(sinkpad, |
michael@0 | 1253 | (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING | |
michael@0 | 1254 | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | |
michael@0 | 1255 | GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | |
michael@0 | 1256 | GST_PAD_PROBE_TYPE_EVENT_FLUSH), |
michael@0 | 1257 | &GStreamerReader::EventProbeCb, this, nullptr); |
michael@0 | 1258 | gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, |
michael@0 | 1259 | GStreamerReader::QueryProbeCb, nullptr, nullptr); |
michael@0 | 1260 | |
michael@0 | 1261 | gst_pad_set_element_private(sinkpad, this); |
michael@0 | 1262 | gst_object_unref(sinkpad); |
michael@0 | 1263 | |
michael@0 | 1264 | sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink"); |
michael@0 | 1265 | gst_pad_add_probe(sinkpad, |
michael@0 | 1266 | (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING | |
michael@0 | 1267 | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | |
michael@0 | 1268 | GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | |
michael@0 | 1269 | GST_PAD_PROBE_TYPE_EVENT_FLUSH), |
michael@0 | 1270 | &GStreamerReader::EventProbeCb, this, nullptr); |
michael@0 | 1271 | gst_object_unref(sinkpad); |
michael@0 | 1272 | } |
michael@0 | 1273 | |
michael@0 | 1274 | GstPadProbeReturn GStreamerReader::EventProbeCb(GstPad *aPad, |
michael@0 | 1275 | GstPadProbeInfo *aInfo, |
michael@0 | 1276 | gpointer aUserData) |
michael@0 | 1277 | { |
michael@0 | 1278 | GStreamerReader *reader = (GStreamerReader *) aUserData; |
michael@0 | 1279 | GstEvent *aEvent = (GstEvent *)aInfo->data; |
michael@0 | 1280 | return reader->EventProbe(aPad, aEvent); |
michael@0 | 1281 | } |
michael@0 | 1282 | |
michael@0 | 1283 | GstPadProbeReturn GStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent) |
michael@0 | 1284 | { |
michael@0 | 1285 | GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad)); |
michael@0 | 1286 | |
michael@0 | 1287 | LOG(PR_LOG_DEBUG, "event probe %s", GST_EVENT_TYPE_NAME (aEvent)); |
michael@0 | 1288 | |
michael@0 | 1289 | switch(GST_EVENT_TYPE(aEvent)) { |
michael@0 | 1290 | case GST_EVENT_SEGMENT: |
michael@0 | 1291 | { |
michael@0 | 1292 | const GstSegment *newSegment; |
michael@0 | 1293 | GstSegment* segment; |
michael@0 | 1294 | |
michael@0 | 1295 | /* Store the segments so we can convert timestamps to stream time, which |
michael@0 | 1296 | * is what the upper layers sync on. |
michael@0 | 1297 | */ |
michael@0 | 1298 | ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
michael@0 | 1299 | #if GST_VERSION_MINOR <= 1 && GST_VERSION_MICRO < 1 |
michael@0 | 1300 | ResetDecode(); |
michael@0 | 1301 | #endif |
michael@0 | 1302 | gst_event_parse_segment(aEvent, &newSegment); |
michael@0 | 1303 | if (parent == GST_ELEMENT(mVideoAppSink)) |
michael@0 | 1304 | segment = &mVideoSegment; |
michael@0 | 1305 | else |
michael@0 | 1306 | segment = &mAudioSegment; |
michael@0 | 1307 | gst_segment_copy_into (newSegment, segment); |
michael@0 | 1308 | break; |
michael@0 | 1309 | } |
michael@0 | 1310 | case GST_EVENT_FLUSH_STOP: |
michael@0 | 1311 | /* Reset on seeks */ |
michael@0 | 1312 | ResetDecode(); |
michael@0 | 1313 | break; |
michael@0 | 1314 | default: |
michael@0 | 1315 | break; |
michael@0 | 1316 | } |
michael@0 | 1317 | gst_object_unref(parent); |
michael@0 | 1318 | |
michael@0 | 1319 | return GST_PAD_PROBE_OK; |
michael@0 | 1320 | } |
michael@0 | 1321 | |
michael@0 | 1322 | GstPadProbeReturn GStreamerReader::QueryProbeCb(GstPad* aPad, GstPadProbeInfo* aInfo, gpointer aUserData) |
michael@0 | 1323 | { |
michael@0 | 1324 | GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad)); |
michael@0 | 1325 | return reader->QueryProbe(aPad, aInfo, aUserData); |
michael@0 | 1326 | } |
michael@0 | 1327 | |
michael@0 | 1328 | GstPadProbeReturn GStreamerReader::QueryProbe(GstPad* aPad, GstPadProbeInfo* aInfo, gpointer aUserData) |
michael@0 | 1329 | { |
michael@0 | 1330 | GstQuery *query = gst_pad_probe_info_get_query(aInfo); |
michael@0 | 1331 | GstPadProbeReturn ret = GST_PAD_PROBE_OK; |
michael@0 | 1332 | |
michael@0 | 1333 | switch (GST_QUERY_TYPE (query)) { |
michael@0 | 1334 | case GST_QUERY_ALLOCATION: |
michael@0 | 1335 | GstCaps *caps; |
michael@0 | 1336 | GstVideoInfo info; |
michael@0 | 1337 | gboolean need_pool; |
michael@0 | 1338 | |
michael@0 | 1339 | gst_query_parse_allocation(query, &caps, &need_pool); |
michael@0 | 1340 | gst_video_info_init(&info); |
michael@0 | 1341 | gst_video_info_from_caps(&info, caps); |
michael@0 | 1342 | gst_query_add_allocation_param(query, mAllocator, nullptr); |
michael@0 | 1343 | gst_query_add_allocation_pool(query, mBufferPool, info.size, 0, 0); |
michael@0 | 1344 | break; |
michael@0 | 1345 | default: |
michael@0 | 1346 | break; |
michael@0 | 1347 | } |
michael@0 | 1348 | |
michael@0 | 1349 | return ret; |
michael@0 | 1350 | } |
michael@0 | 1351 | |
michael@0 | 1352 | void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame, |
michael@0 | 1353 | PlanarYCbCrImage::Data *aData) |
michael@0 | 1354 | { |
michael@0 | 1355 | NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo), |
michael@0 | 1356 | "Non-YUV video frame formats not supported"); |
michael@0 | 1357 | NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3, |
michael@0 | 1358 | "Unsupported number of components in video frame"); |
michael@0 | 1359 | |
michael@0 | 1360 | aData->mPicX = aData->mPicY = 0; |
michael@0 | 1361 | aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height); |
michael@0 | 1362 | aData->mStereoMode = StereoMode::MONO; |
michael@0 | 1363 | |
michael@0 | 1364 | aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0); |
michael@0 | 1365 | aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0); |
michael@0 | 1366 | aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0), |
michael@0 | 1367 | GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0)); |
michael@0 | 1368 | aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1; |
michael@0 | 1369 | aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1); |
michael@0 | 1370 | aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1), |
michael@0 | 1371 | GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1)); |
michael@0 | 1372 | aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1); |
michael@0 | 1373 | aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2); |
michael@0 | 1374 | aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1; |
michael@0 | 1375 | aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1; |
michael@0 | 1376 | } |
michael@0 | 1377 | |
michael@0 | 1378 | nsRefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer) |
michael@0 | 1379 | { |
michael@0 | 1380 | nsRefPtr<PlanarYCbCrImage> image = nullptr; |
michael@0 | 1381 | |
michael@0 | 1382 | if (gst_buffer_n_memory(aBuffer) == 1) { |
michael@0 | 1383 | GstMemory* mem = gst_buffer_peek_memory(aBuffer, 0); |
michael@0 | 1384 | if (GST_IS_MOZ_GFX_MEMORY_ALLOCATOR(mem->allocator)) { |
michael@0 | 1385 | image = moz_gfx_memory_get_image(mem); |
michael@0 | 1386 | |
michael@0 | 1387 | GstVideoFrame frame; |
michael@0 | 1388 | gst_video_frame_map(&frame, &mVideoInfo, aBuffer, GST_MAP_READ); |
michael@0 | 1389 | PlanarYCbCrImage::Data data; |
michael@0 | 1390 | ImageDataFromVideoFrame(&frame, &data); |
michael@0 | 1391 | image->SetDataNoCopy(data); |
michael@0 | 1392 | gst_video_frame_unmap(&frame); |
michael@0 | 1393 | } |
michael@0 | 1394 | } |
michael@0 | 1395 | |
michael@0 | 1396 | return image; |
michael@0 | 1397 | } |
michael@0 | 1398 | |
michael@0 | 1399 | void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer, |
michael@0 | 1400 | GstBuffer** aOutBuffer, |
michael@0 | 1401 | nsRefPtr<PlanarYCbCrImage> &image) |
michael@0 | 1402 | { |
michael@0 | 1403 | *aOutBuffer = gst_buffer_new_allocate(mAllocator, gst_buffer_get_size(aBuffer), nullptr); |
michael@0 | 1404 | GstMemory *mem = gst_buffer_peek_memory(*aOutBuffer, 0); |
michael@0 | 1405 | GstMapInfo map_info; |
michael@0 | 1406 | gst_memory_map(mem, &map_info, GST_MAP_WRITE); |
michael@0 | 1407 | gst_buffer_extract(aBuffer, 0, map_info.data, gst_buffer_get_size(aBuffer)); |
michael@0 | 1408 | gst_memory_unmap(mem, &map_info); |
michael@0 | 1409 | |
michael@0 | 1410 | /* create a new gst buffer with the newly created memory and copy the |
michael@0 | 1411 | * metadata over from the incoming buffer */ |
michael@0 | 1412 | gst_buffer_copy_into(*aOutBuffer, aBuffer, |
michael@0 | 1413 | (GstBufferCopyFlags)(GST_BUFFER_COPY_METADATA), 0, -1); |
michael@0 | 1414 | image = GetImageFromBuffer(*aOutBuffer); |
michael@0 | 1415 | } |
michael@0 | 1416 | #endif |
michael@0 | 1417 | |
michael@0 | 1418 | } // namespace mozilla |
michael@0 | 1419 |