content/media/wmf/WMFReader.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Fri, 16 Jan 2015 04:50:19 +0100
branch       TOR_BUG_9701
changeset    13:44a2da4a2ab2
permissions  -rw-r--r--

Replace the accessor implementation with direct member state manipulation, as
requested in https://trac.torproject.org/projects/tor/ticket/9701#comment:32
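
The requested change swaps calls through a setter for direct writes to the
underlying member. A minimal sketch of the before/after pattern follows; the
class and member names are invented for illustration and are not taken from
the patch or from WMFReader.cpp:

    // Hypothetical illustration only; ExampleState, SetFlag and mFlag are
    // made-up names used to show the pattern, not the real patch contents.
    class ExampleState {
    public:
      void SetFlag(bool aValue) { mFlag = aValue; }  // accessor
      bool mFlag = false;                            // directly writable member
    };

    void Update(ExampleState* aState)
    {
      // Before: the state change goes through the accessor.
      //   aState->SetFlag(true);

      // After: direct member state manipulation, as requested in the ticket.
      aState->mFlag = true;
    }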

michael@0 1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
michael@0 2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
michael@0 3 /* This Source Code Form is subject to the terms of the Mozilla Public
michael@0 4 * License, v. 2.0. If a copy of the MPL was not distributed with this
michael@0 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 6
michael@0 7 #include "WMFReader.h"
michael@0 8 #include "WMFDecoder.h"
michael@0 9 #include "WMFUtils.h"
michael@0 10 #include "WMFByteStream.h"
michael@0 11 #include "WMFSourceReaderCallback.h"
michael@0 12 #include "mozilla/ArrayUtils.h"
michael@0 13 #include "mozilla/dom/TimeRanges.h"
michael@0 14 #include "mozilla/dom/HTMLMediaElement.h"
michael@0 15 #include "mozilla/Preferences.h"
michael@0 16 #include "DXVA2Manager.h"
michael@0 17 #include "ImageContainer.h"
michael@0 18 #include "Layers.h"
michael@0 19 #include "mozilla/layers/LayersTypes.h"
michael@0 20
michael@0 21 #ifndef MOZ_SAMPLE_TYPE_FLOAT32
michael@0 22 #error We expect 32-bit float audio samples on desktop for the Windows Media Foundation media backend.
michael@0 23 #endif
michael@0 24
michael@0 25 #include "MediaDecoder.h"
michael@0 26 #include "VideoUtils.h"
michael@0 27 #include "gfx2DGlue.h"
michael@0 28
michael@0 29 using namespace mozilla::gfx;
michael@0 30 using mozilla::layers::Image;
michael@0 31 using mozilla::layers::LayerManager;
michael@0 32 using mozilla::layers::LayersBackend;
michael@0 33
michael@0 34 namespace mozilla {
michael@0 35
michael@0 36 #ifdef PR_LOGGING
michael@0 37 extern PRLogModuleInfo* gMediaDecoderLog;
michael@0 38 #define DECODER_LOG(...) PR_LOG(gMediaDecoderLog, PR_LOG_DEBUG, (__VA_ARGS__))
michael@0 39 #else
michael@0 40 #define DECODER_LOG(...)
michael@0 41 #endif
michael@0 42
michael@0 43 // Uncomment to enable verbose per-sample logging.
michael@0 44 //#define LOG_SAMPLE_DECODE 1
michael@0 45
michael@0 46 WMFReader::WMFReader(AbstractMediaDecoder* aDecoder)
michael@0 47 : MediaDecoderReader(aDecoder),
michael@0 48 mSourceReader(nullptr),
michael@0 49 mAudioChannels(0),
michael@0 50 mAudioBytesPerSample(0),
michael@0 51 mAudioRate(0),
michael@0 52 mVideoWidth(0),
michael@0 53 mVideoHeight(0),
michael@0 54 mVideoStride(0),
michael@0 55 mAudioFrameSum(0),
michael@0 56 mAudioFrameOffset(0),
michael@0 57 mHasAudio(false),
michael@0 58 mHasVideo(false),
michael@0 59 mUseHwAccel(false),
michael@0 60 mMustRecaptureAudioPosition(true),
michael@0 61 mIsMP3Enabled(WMFDecoder::IsMP3Supported()),
michael@0 62 mCOMInitialized(false)
michael@0 63 {
michael@0 64 NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
michael@0 65 MOZ_COUNT_CTOR(WMFReader);
michael@0 66 }
michael@0 67
michael@0 68 WMFReader::~WMFReader()
michael@0 69 {
michael@0 70 NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
michael@0 71
michael@0 72 // Note: We must shutdown the byte stream before calling MFShutdown, else we
michael@0 73 // get assertion failures when unlocking the byte stream's work queue.
michael@0 74 if (mByteStream) {
michael@0 75 DebugOnly<nsresult> rv = mByteStream->Shutdown();
michael@0 76 NS_ASSERTION(NS_SUCCEEDED(rv), "Failed to shutdown WMFByteStream");
michael@0 77 }
michael@0 78 DebugOnly<HRESULT> hr = wmf::MFShutdown();
michael@0 79 NS_ASSERTION(SUCCEEDED(hr), "MFShutdown failed");
michael@0 80 MOZ_COUNT_DTOR(WMFReader);
michael@0 81 }
michael@0 82
michael@0 83 bool
michael@0 84 WMFReader::InitializeDXVA()
michael@0 85 {
michael@0 86 if (!Preferences::GetBool("media.windows-media-foundation.use-dxva", false)) {
michael@0 87 return false;
michael@0 88 }
michael@0 89 MOZ_ASSERT(mDecoder->GetImageContainer());
michael@0 90
michael@0 91 // Extract the layer manager backend type so that we can determine
michael@0 92 // whether it's worthwhile using DXVA. If we're not running with a D3D
michael@0 93 // layer manager then the readback of decoded video frames from GPU to
michael@0 94 // CPU memory grinds painting to a halt, and makes playback performance
michael@0 95 // *worse*.
michael@0 96 MediaDecoderOwner* owner = mDecoder->GetOwner();
michael@0 97 NS_ENSURE_TRUE(owner, false);
michael@0 98
michael@0 99 dom::HTMLMediaElement* element = owner->GetMediaElement();
michael@0 100 NS_ENSURE_TRUE(element, false);
michael@0 101
michael@0 102 nsRefPtr<LayerManager> layerManager =
michael@0 103 nsContentUtils::LayerManagerForDocument(element->OwnerDoc());
michael@0 104 NS_ENSURE_TRUE(layerManager, false);
michael@0 105
michael@0 106 LayersBackend backend = layerManager->GetCompositorBackendType();
michael@0 107 if (backend != LayersBackend::LAYERS_D3D9 &&
michael@0 108 backend != LayersBackend::LAYERS_D3D10 &&
michael@0 109 backend != LayersBackend::LAYERS_D3D11) {
michael@0 110 return false;
michael@0 111 }
michael@0 112
michael@0 113 mDXVA2Manager = DXVA2Manager::Create();
michael@0 114
michael@0 115 return mDXVA2Manager != nullptr;
michael@0 116 }
michael@0 117
michael@0 118 nsresult
michael@0 119 WMFReader::Init(MediaDecoderReader* aCloneDonor)
michael@0 120 {
michael@0 121 NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
michael@0 122
michael@0 123 nsresult rv = WMFDecoder::LoadDLLs();
michael@0 124 NS_ENSURE_SUCCESS(rv, rv);
michael@0 125
michael@0 126 if (FAILED(wmf::MFStartup())) {
michael@0 127 NS_WARNING("Failed to initialize Windows Media Foundation");
michael@0 128 return NS_ERROR_FAILURE;
michael@0 129 }
michael@0 130
michael@0 131 mSourceReaderCallback = new WMFSourceReaderCallback();
michael@0 132
michael@0 133 // Must be created on main thread.
michael@0 134 mByteStream = new WMFByteStream(mDecoder->GetResource(), mSourceReaderCallback);
michael@0 135 rv = mByteStream->Init();
michael@0 136 NS_ENSURE_SUCCESS(rv, rv);
michael@0 137
michael@0 138 if (mDecoder->GetImageContainer() != nullptr &&
michael@0 139 IsVideoContentType(mDecoder->GetResource()->GetContentType())) {
michael@0 140 mUseHwAccel = InitializeDXVA();
michael@0 141 } else {
michael@0 142 mUseHwAccel = false;
michael@0 143 }
michael@0 144
michael@0 145 return NS_OK;
michael@0 146 }
michael@0 147
michael@0 148 bool
michael@0 149 WMFReader::HasAudio()
michael@0 150 {
michael@0 151 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
michael@0 152 return mHasAudio;
michael@0 153 }
michael@0 154
michael@0 155 bool
michael@0 156 WMFReader::HasVideo()
michael@0 157 {
michael@0 158 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
michael@0 159 return mHasVideo;
michael@0 160 }
michael@0 161
michael@0 162 static HRESULT
michael@0 163 ConfigureSourceReaderStream(IMFSourceReader *aReader,
michael@0 164 const DWORD aStreamIndex,
michael@0 165 const GUID& aOutputSubType,
michael@0 166 const GUID* aAllowedInSubTypes,
michael@0 167 const uint32_t aNumAllowedInSubTypes)
michael@0 168 {
michael@0 169 NS_ENSURE_TRUE(aReader, E_POINTER);
michael@0 170 NS_ENSURE_TRUE(aAllowedInSubTypes, E_POINTER);
michael@0 171
michael@0 172 RefPtr<IMFMediaType> nativeType;
michael@0 173 RefPtr<IMFMediaType> type;
michael@0 174 HRESULT hr;
michael@0 175
michael@0 176 // Find the native format of the stream.
michael@0 177 hr = aReader->GetNativeMediaType(aStreamIndex, 0, byRef(nativeType));
michael@0 178 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 179
michael@0 180 // Get the subtype of the stream's native (compressed) format, so that we
michael@0 181 // can check it against the list of allowed input formats below.
michael@0 182 GUID subType;
michael@0 183 hr = nativeType->GetGUID(MF_MT_SUBTYPE, &subType);
michael@0 184 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 185
michael@0 186 // Ensure the input type of the media is in the allowed formats list.
michael@0 187 bool isSubTypeAllowed = false;
michael@0 188 for (uint32_t i = 0; i < aNumAllowedInSubTypes; i++) {
michael@0 189 if (aAllowedInSubTypes[i] == subType) {
michael@0 190 isSubTypeAllowed = true;
michael@0 191 break;
michael@0 192 }
michael@0 193 }
michael@0 194 if (!isSubTypeAllowed) {
michael@0 195 nsCString name = GetGUIDName(subType);
michael@0 196 DECODER_LOG("ConfigureSourceReaderStream subType=%s is not allowed to be decoded", name.get());
michael@0 197 return E_FAIL;
michael@0 198 }
michael@0 199
michael@0 200 // Find the major type.
michael@0 201 GUID majorType;
michael@0 202 hr = nativeType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
michael@0 203 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 204
michael@0 205 // Define the output type.
michael@0 206 hr = wmf::MFCreateMediaType(byRef(type));
michael@0 207 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 208
michael@0 209 hr = type->SetGUID(MF_MT_MAJOR_TYPE, majorType);
michael@0 210 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 211
michael@0 212 hr = type->SetGUID(MF_MT_SUBTYPE, aOutputSubType);
michael@0 213 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 214
michael@0 215 // Set the uncompressed format. This can fail if the decoder can't produce
michael@0 216 // that type.
michael@0 217 return aReader->SetCurrentMediaType(aStreamIndex, nullptr, type);
michael@0 218 }
michael@0 219
michael@0 220 // Returns the duration of the resource, in microseconds.
michael@0 221 HRESULT
michael@0 222 GetSourceReaderDuration(IMFSourceReader *aReader,
michael@0 223 int64_t& aOutDuration)
michael@0 224 {
michael@0 225 AutoPropVar var;
michael@0 226 HRESULT hr = aReader->GetPresentationAttribute(MF_SOURCE_READER_MEDIASOURCE,
michael@0 227 MF_PD_DURATION,
michael@0 228 &var);
michael@0 229 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 230
michael@0 231 // WMF stores duration in hundred nanosecond units.
michael@0 232 int64_t duration_hns = 0;
michael@0 233 hr = wmf::PropVariantToInt64(var, &duration_hns);
michael@0 234 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 235
michael@0 236 aOutDuration = HNsToUsecs(duration_hns);
michael@0 237
michael@0 238 return S_OK;
michael@0 239 }
michael@0 240
michael@0 241 HRESULT
michael@0 242 GetSourceReaderCanSeek(IMFSourceReader* aReader, bool& aOutCanSeek)
michael@0 243 {
michael@0 244 NS_ENSURE_TRUE(aReader, E_FAIL);
michael@0 245
michael@0 246 HRESULT hr;
michael@0 247 AutoPropVar var;
michael@0 248 hr = aReader->GetPresentationAttribute(MF_SOURCE_READER_MEDIASOURCE,
michael@0 249 MF_SOURCE_READER_MEDIASOURCE_CHARACTERISTICS,
michael@0 250 &var);
michael@0 251 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 252
michael@0 253 ULONG flags = 0;
michael@0 254 hr = wmf::PropVariantToUInt32(var, &flags);
michael@0 255 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 256
michael@0 257 aOutCanSeek = ((flags & MFMEDIASOURCE_CAN_SEEK) == MFMEDIASOURCE_CAN_SEEK);
michael@0 258
michael@0 259 return S_OK;
michael@0 260 }
michael@0 261
michael@0 262 HRESULT
michael@0 263 WMFReader::ConfigureVideoFrameGeometry(IMFMediaType* aMediaType)
michael@0 264 {
michael@0 265 NS_ENSURE_TRUE(aMediaType != nullptr, E_POINTER);
michael@0 266 HRESULT hr;
michael@0 267
michael@0 268 // Verify that the video subtype is what we expect it to be.
michael@0 269 // When using hardware acceleration/DXVA2 the video format should
michael@0 270 // be NV12, which is DXVA2's preferred format. For software decoding
michael@0 271 // we use YV12, as that's easier for us to stick into our rendering
michael@0 272 // pipeline than NV12. NV12 has interleaved UV samples, whereas YV12
michael@0 273 // is a planar format.
michael@0 274 GUID videoFormat;
michael@0 275 hr = aMediaType->GetGUID(MF_MT_SUBTYPE, &videoFormat);
michael@0 276 NS_ENSURE_TRUE(videoFormat == MFVideoFormat_NV12 || !mUseHwAccel, E_FAIL);
michael@0 277 NS_ENSURE_TRUE(videoFormat == MFVideoFormat_YV12 || mUseHwAccel, E_FAIL);
michael@0 278
michael@0 279 nsIntRect pictureRegion;
michael@0 280 hr = GetPictureRegion(aMediaType, pictureRegion);
michael@0 281 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 282
michael@0 283 UINT32 width = 0, height = 0;
michael@0 284 hr = MFGetAttributeSize(aMediaType, MF_MT_FRAME_SIZE, &width, &height);
michael@0 285 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 286
michael@0 287 uint32_t aspectNum = 0, aspectDenom = 0;
michael@0 288 hr = MFGetAttributeRatio(aMediaType,
michael@0 289 MF_MT_PIXEL_ASPECT_RATIO,
michael@0 290 &aspectNum,
michael@0 291 &aspectDenom);
michael@0 292 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 293
michael@0 294 // Calculate and validate the picture region and frame dimensions after
michael@0 295 // scaling by the pixel aspect ratio.
michael@0 296 nsIntSize frameSize = nsIntSize(width, height);
michael@0 297 nsIntSize displaySize = nsIntSize(pictureRegion.width, pictureRegion.height);
michael@0 298 ScaleDisplayByAspectRatio(displaySize, float(aspectNum) / float(aspectDenom));
michael@0 299 if (!IsValidVideoRegion(frameSize, pictureRegion, displaySize)) {
michael@0 300 // Video track's frame sizes will overflow. Ignore the video track.
michael@0 301 return E_FAIL;
michael@0 302 }
michael@0 303
michael@0 304 // Success! Save state.
michael@0 305 mInfo.mVideo.mDisplay = displaySize;
michael@0 306 GetDefaultStride(aMediaType, &mVideoStride);
michael@0 307 mVideoWidth = width;
michael@0 308 mVideoHeight = height;
michael@0 309 mPictureRegion = pictureRegion;
michael@0 310
michael@0 311 DECODER_LOG("WMFReader frame geometry frame=(%u,%u) stride=%u picture=(%d, %d, %d, %d) display=(%d,%d) PAR=%d:%d",
michael@0 312 width, height,
michael@0 313 mVideoStride,
michael@0 314 mPictureRegion.x, mPictureRegion.y, mPictureRegion.width, mPictureRegion.height,
michael@0 315 displaySize.width, displaySize.height,
michael@0 316 aspectNum, aspectDenom);
michael@0 317
michael@0 318 return S_OK;
michael@0 319 }
michael@0 320
michael@0 321 HRESULT
michael@0 322 WMFReader::ConfigureVideoDecoder()
michael@0 323 {
michael@0 324 NS_ASSERTION(mSourceReader, "Must have a SourceReader before configuring decoders!");
michael@0 325
michael@0 326 // Determine if we have video.
michael@0 327 if (!mSourceReader ||
michael@0 328 !SourceReaderHasStream(mSourceReader, MF_SOURCE_READER_FIRST_VIDEO_STREAM)) {
michael@0 329 // No stream, no error.
michael@0 330 return S_OK;
michael@0 331 }
michael@0 332
michael@0 333 if (!mDecoder->GetImageContainer()) {
michael@0 334 // We can't display the video, so don't bother to decode; disable the stream.
michael@0 335 return mSourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_VIDEO_STREAM, FALSE);
michael@0 336 }
michael@0 337
michael@0 338 static const GUID MP4VideoTypes[] = {
michael@0 339 MFVideoFormat_H264
michael@0 340 };
michael@0 341 HRESULT hr = ConfigureSourceReaderStream(mSourceReader,
michael@0 342 MF_SOURCE_READER_FIRST_VIDEO_STREAM,
michael@0 343 mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12,
michael@0 344 MP4VideoTypes,
michael@0 345 ArrayLength(MP4VideoTypes));
michael@0 346 if (FAILED(hr)) {
michael@0 347 DECODER_LOG("Failed to configured video output");
michael@0 348 return hr;
michael@0 349 }
michael@0 350
michael@0 351 RefPtr<IMFMediaType> mediaType;
michael@0 352 hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
michael@0 353 byRef(mediaType));
michael@0 354 if (FAILED(hr)) {
michael@0 355 NS_WARNING("Failed to get configured video media type");
michael@0 356 return hr;
michael@0 357 }
michael@0 358
michael@0 359 if (FAILED(ConfigureVideoFrameGeometry(mediaType))) {
michael@0 360 NS_WARNING("Failed configured video frame dimensions");
michael@0 361 return hr;
michael@0 362 }
michael@0 363
michael@0 364 DECODER_LOG("Successfully configured video stream");
michael@0 365
michael@0 366 mHasVideo = mInfo.mVideo.mHasVideo = true;
michael@0 367
michael@0 368 return S_OK;
michael@0 369 }
michael@0 370
michael@0 371 void
michael@0 372 WMFReader::GetSupportedAudioCodecs(const GUID** aCodecs, uint32_t* aNumCodecs)
michael@0 373 {
michael@0 374 MOZ_ASSERT(aCodecs);
michael@0 375 MOZ_ASSERT(aNumCodecs);
michael@0 376
michael@0 377 if (mIsMP3Enabled) {
michael@0 378 GUID aacOrMp3 = MFMPEG4Format_Base;
michael@0 379 aacOrMp3.Data1 = 0x6D703461;// FOURCC('m','p','4','a');
michael@0 380 static const GUID codecs[] = {
michael@0 381 MFAudioFormat_AAC,
michael@0 382 MFAudioFormat_MP3,
michael@0 383 aacOrMp3
michael@0 384 };
michael@0 385 *aCodecs = codecs;
michael@0 386 *aNumCodecs = ArrayLength(codecs);
michael@0 387 } else {
michael@0 388 static const GUID codecs[] = {
michael@0 389 MFAudioFormat_AAC
michael@0 390 };
michael@0 391 *aCodecs = codecs;
michael@0 392 *aNumCodecs = ArrayLength(codecs);
michael@0 393 }
michael@0 394 }
michael@0 395
michael@0 396 HRESULT
michael@0 397 WMFReader::ConfigureAudioDecoder()
michael@0 398 {
michael@0 399 NS_ASSERTION(mSourceReader, "Must have a SourceReader before configuring decoders!");
michael@0 400
michael@0 401 if (!mSourceReader ||
michael@0 402 !SourceReaderHasStream(mSourceReader, MF_SOURCE_READER_FIRST_AUDIO_STREAM)) {
michael@0 403 // No stream, no error.
michael@0 404 return S_OK;
michael@0 405 }
michael@0 406
michael@0 407 const GUID* codecs;
michael@0 408 uint32_t numCodecs = 0;
michael@0 409 GetSupportedAudioCodecs(&codecs, &numCodecs);
michael@0 410
michael@0 411 HRESULT hr = ConfigureSourceReaderStream(mSourceReader,
michael@0 412 MF_SOURCE_READER_FIRST_AUDIO_STREAM,
michael@0 413 MFAudioFormat_Float,
michael@0 414 codecs,
michael@0 415 numCodecs);
michael@0 416 if (FAILED(hr)) {
michael@0 417 NS_WARNING("Failed to configure WMF Audio decoder for PCM output");
michael@0 418 return hr;
michael@0 419 }
michael@0 420
michael@0 421 RefPtr<IMFMediaType> mediaType;
michael@0 422 hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
michael@0 423 byRef(mediaType));
michael@0 424 if (FAILED(hr)) {
michael@0 425 NS_WARNING("Failed to get configured audio media type");
michael@0 426 return hr;
michael@0 427 }
michael@0 428
michael@0 429 mAudioRate = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_SAMPLES_PER_SECOND, 0);
michael@0 430 mAudioChannels = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_NUM_CHANNELS, 0);
michael@0 431 mAudioBytesPerSample = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_BITS_PER_SAMPLE, 16) / 8;
michael@0 432
michael@0 433 mInfo.mAudio.mChannels = mAudioChannels;
michael@0 434 mInfo.mAudio.mRate = mAudioRate;
michael@0 435 mHasAudio = mInfo.mAudio.mHasAudio = true;
michael@0 436
michael@0 437 DECODER_LOG("Successfully configured audio stream. rate=%u channels=%u bitsPerSample=%u",
michael@0 438 mAudioRate, mAudioChannels, mAudioBytesPerSample);
michael@0 439
michael@0 440 return S_OK;
michael@0 441 }
michael@0 442
michael@0 443 HRESULT
michael@0 444 WMFReader::CreateSourceReader()
michael@0 445 {
michael@0 446 HRESULT hr;
michael@0 447
michael@0 448 RefPtr<IMFAttributes> attr;
michael@0 449 hr = wmf::MFCreateAttributes(byRef(attr), 1);
michael@0 450 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 451
michael@0 452 hr = attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, mSourceReaderCallback);
michael@0 453 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 454
michael@0 455 if (mUseHwAccel) {
michael@0 456 hr = attr->SetUnknown(MF_SOURCE_READER_D3D_MANAGER,
michael@0 457 mDXVA2Manager->GetDXVADeviceManager());
michael@0 458 if (FAILED(hr)) {
michael@0 459 DECODER_LOG("Failed to set DXVA2 D3D Device manager on source reader attributes");
michael@0 460 mUseHwAccel = false;
michael@0 461 }
michael@0 462 }
michael@0 463
michael@0 464 hr = wmf::MFCreateSourceReaderFromByteStream(mByteStream, attr, byRef(mSourceReader));
michael@0 465 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 466
michael@0 467 hr = ConfigureVideoDecoder();
michael@0 468 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 469
michael@0 470 hr = ConfigureAudioDecoder();
michael@0 471 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 472
michael@0 473 if (mUseHwAccel && mInfo.mVideo.mHasVideo) {
michael@0 474 RefPtr<IMFTransform> videoDecoder;
michael@0 475 hr = mSourceReader->GetServiceForStream(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
michael@0 476 GUID_NULL,
michael@0 477 IID_IMFTransform,
michael@0 478 (void**)(IMFTransform**)(byRef(videoDecoder)));
michael@0 479
michael@0 480 if (SUCCEEDED(hr)) {
michael@0 481 ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
michael@0 482 hr = videoDecoder->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER,
michael@0 483 manager);
michael@0 484 if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
michael@0 485 // Ignore MF_E_TRANSFORM_TYPE_NOT_SET. Vista returns this here
michael@0 486 // on some, perhaps all, video cards. This may be because activating
michael@0 487 // DXVA changes the available output types. It seems to be safe to
michael@0 488 // ignore this error.
michael@0 489 hr = S_OK;
michael@0 490 }
michael@0 491 }
michael@0 492 if (FAILED(hr)) {
michael@0 493 DECODER_LOG("Failed to set DXVA2 D3D Device manager on decoder hr=0x%x", hr);
michael@0 494 mUseHwAccel = false;
michael@0 495 }
michael@0 496 }
michael@0 497 return hr;
michael@0 498 }
michael@0 499
michael@0 500 nsresult
michael@0 501 WMFReader::ReadMetadata(MediaInfo* aInfo,
michael@0 502 MetadataTags** aTags)
michael@0 503 {
michael@0 504 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
michael@0 505
michael@0 506 DECODER_LOG("WMFReader::ReadMetadata()");
michael@0 507 HRESULT hr;
michael@0 508
michael@0 509 const bool triedToInitDXVA = mUseHwAccel;
michael@0 510 if (FAILED(CreateSourceReader())) {
michael@0 511 mSourceReader = nullptr;
michael@0 512 if (triedToInitDXVA && !mUseHwAccel) {
michael@0 513 // We tried to initialize DXVA and failed. Try again to create the
michael@0 514 // IMFSourceReader but this time we won't use DXVA. Note that we
michael@0 515 // must recreate the IMFSourceReader from scratch, as on some systems
michael@0 516 // (AMD Radeon 3000) we cannot successfully reconfigure an existing
michael@0 517 // reader to not use DXVA after we've failed to configure DXVA.
michael@0 518 // See bug 987127.
michael@0 519 if (FAILED(CreateSourceReader())) {
michael@0 520 mSourceReader = nullptr;
michael@0 521 }
michael@0 522 }
michael@0 523 }
michael@0 524
michael@0 525 if (!mSourceReader) {
michael@0 526 NS_WARNING("Failed to create IMFSourceReader");
michael@0 527 return NS_ERROR_FAILURE;
michael@0 528 }
michael@0 529
michael@0 530 if (mInfo.HasVideo()) {
michael@0 531 DECODER_LOG("Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
michael@0 532 }
michael@0 533
michael@0 534 // Abort if both video and audio failed to initialize.
michael@0 535 NS_ENSURE_TRUE(mInfo.HasValidMedia(), NS_ERROR_FAILURE);
michael@0 536
michael@0 537 // Get the duration, and report it to the decoder if we have it.
michael@0 538 int64_t duration = 0;
michael@0 539 hr = GetSourceReaderDuration(mSourceReader, duration);
michael@0 540 if (SUCCEEDED(hr)) {
michael@0 541 ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
michael@0 542 mDecoder->SetMediaEndTime(duration);
michael@0 543 }
michael@0 544 // We can seek if we get a duration *and* the reader reports that it's
michael@0 545 // seekable.
michael@0 546 bool canSeek = false;
michael@0 547 if (FAILED(hr) ||
michael@0 548 FAILED(GetSourceReaderCanSeek(mSourceReader, canSeek)) ||
michael@0 549 !canSeek) {
michael@0 550 mDecoder->SetMediaSeekable(false);
michael@0 551 }
michael@0 552
michael@0 553 *aInfo = mInfo;
michael@0 554 *aTags = nullptr;
michael@0 555 // aTags can be retrieved using techniques like those used here:
michael@0 556 // http://blogs.msdn.com/b/mf/archive/2010/01/12/mfmediapropdump.aspx
michael@0 557
michael@0 558 return NS_OK;
michael@0 559 }
michael@0 560
michael@0 561 bool
michael@0 562 WMFReader::DecodeAudioData()
michael@0 563 {
michael@0 564 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
michael@0 565
michael@0 566 HRESULT hr;
michael@0 567 hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
michael@0 568 0, // control flags
michael@0 569 0, // read stream index
michael@0 570 nullptr,
michael@0 571 nullptr,
michael@0 572 nullptr);
michael@0 573
michael@0 574 if (FAILED(hr)) {
michael@0 575 DECODER_LOG("WMFReader::DecodeAudioData() ReadSample failed with hr=0x%x", hr);
michael@0 576 // End the stream.
michael@0 577 return false;
michael@0 578 }
michael@0 579
michael@0 580 DWORD flags = 0;
michael@0 581 LONGLONG timestampHns = 0;
michael@0 582 RefPtr<IMFSample> sample;
michael@0 583 hr = mSourceReaderCallback->Wait(&flags, &timestampHns, byRef(sample));
michael@0 584 if (FAILED(hr) ||
michael@0 585 (flags & MF_SOURCE_READERF_ERROR) ||
michael@0 586 (flags & MF_SOURCE_READERF_ENDOFSTREAM) ||
michael@0 587 (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)) {
michael@0 588 DECODER_LOG("WMFReader::DecodeAudioData() ReadSample failed with hr=0x%x flags=0x%x",
michael@0 589 hr, flags);
michael@0 590 // End the stream.
michael@0 591 return false;
michael@0 592 }
michael@0 593
michael@0 594 if (!sample) {
michael@0 595 // Not enough data? Try again...
michael@0 596 return true;
michael@0 597 }
michael@0 598
michael@0 599 RefPtr<IMFMediaBuffer> buffer;
michael@0 600 hr = sample->ConvertToContiguousBuffer(byRef(buffer));
michael@0 601 NS_ENSURE_TRUE(SUCCEEDED(hr), false);
michael@0 602
michael@0 603 BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.
michael@0 604 DWORD maxLength = 0, currentLength = 0;
michael@0 605 hr = buffer->Lock(&data, &maxLength, &currentLength);
michael@0 606 NS_ENSURE_TRUE(SUCCEEDED(hr), false);
michael@0 607
michael@0 608 uint32_t numFrames = currentLength / mAudioBytesPerSample / mAudioChannels;
michael@0 609 NS_ASSERTION(sizeof(AudioDataValue) == mAudioBytesPerSample, "Size calculation is wrong");
michael@0 610 nsAutoArrayPtr<AudioDataValue> pcmSamples(new AudioDataValue[numFrames * mAudioChannels]);
michael@0 611 memcpy(pcmSamples.get(), data, currentLength);
michael@0 612 buffer->Unlock();
michael@0 613
michael@0 614 // We calculate the timestamp and the duration based on the number of audio
michael@0 615 // frames we've already played. We don't trust the timestamp stored on the
michael@0 616 // IMFSample, as sometimes it's wrong, possibly due to buggy encoders?
michael@0 617
michael@0 618 // If this sample block comes after a discontinuity (i.e. a gap or seek)
michael@0 619 // reset the frame counters, and capture the timestamp. Future timestamps
michael@0 620 // will be offset from this block's timestamp.
michael@0 621 UINT32 discontinuity = false;
michael@0 622 sample->GetUINT32(MFSampleExtension_Discontinuity, &discontinuity);
michael@0 623 if (mMustRecaptureAudioPosition || discontinuity) {
michael@0 624 mAudioFrameSum = 0;
michael@0 625 hr = HNsToFrames(timestampHns, mAudioRate, &mAudioFrameOffset);
michael@0 626 NS_ENSURE_TRUE(SUCCEEDED(hr), false);
michael@0 627 mMustRecaptureAudioPosition = false;
michael@0 628 }
michael@0 629
michael@0 630 int64_t timestamp;
michael@0 631 hr = FramesToUsecs(mAudioFrameOffset + mAudioFrameSum, mAudioRate, &timestamp);
michael@0 632 NS_ENSURE_TRUE(SUCCEEDED(hr), false);
michael@0 633
michael@0 634 mAudioFrameSum += numFrames;
michael@0 635
michael@0 636 int64_t duration;
michael@0 637 hr = FramesToUsecs(numFrames, mAudioRate, &duration);
michael@0 638 NS_ENSURE_TRUE(SUCCEEDED(hr), false);
michael@0 639
michael@0 640 mAudioQueue.Push(new AudioData(mDecoder->GetResource()->Tell(),
michael@0 641 timestamp,
michael@0 642 duration,
michael@0 643 numFrames,
michael@0 644 pcmSamples.forget(),
michael@0 645 mAudioChannels));
michael@0 646
michael@0 647 #ifdef LOG_SAMPLE_DECODE
michael@0 648 DECODER_LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u",
michael@0 649 timestamp, duration, currentLength);
michael@0 650 #endif
michael@0 651
michael@0 652 return true;
michael@0 653 }
michael@0 654
michael@0 655 HRESULT
michael@0 656 WMFReader::CreateBasicVideoFrame(IMFSample* aSample,
michael@0 657 int64_t aTimestampUsecs,
michael@0 658 int64_t aDurationUsecs,
michael@0 659 int64_t aOffsetBytes,
michael@0 660 VideoData** aOutVideoData)
michael@0 661 {
michael@0 662 NS_ENSURE_TRUE(aSample, E_POINTER);
michael@0 663 NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
michael@0 664
michael@0 665 *aOutVideoData = nullptr;
michael@0 666
michael@0 667 HRESULT hr;
michael@0 668 RefPtr<IMFMediaBuffer> buffer;
michael@0 669
michael@0 670 // Must convert to contiguous buffer to use IMF2DBuffer interface.
michael@0 671 hr = aSample->ConvertToContiguousBuffer(byRef(buffer));
michael@0 672 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 673
michael@0 674 // Try and use the IMF2DBuffer interface if available, otherwise fallback
michael@0 675 // to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient,
michael@0 676 // but only some systems (Windows 8?) support it.
michael@0 677 BYTE* data = nullptr;
michael@0 678 LONG stride = 0;
michael@0 679 RefPtr<IMF2DBuffer> twoDBuffer;
michael@0 680 hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(byRef(twoDBuffer)));
michael@0 681 if (SUCCEEDED(hr)) {
michael@0 682 hr = twoDBuffer->Lock2D(&data, &stride);
michael@0 683 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 684 } else {
michael@0 685 hr = buffer->Lock(&data, nullptr, nullptr);
michael@0 686 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 687 stride = mVideoStride;
michael@0 688 }
michael@0 689
michael@0 690 // YV12, planar format: [YYYY....][VVVV....][UUUU....]
michael@0 691 // i.e., Y, then V, then U.
michael@0 692 VideoData::YCbCrBuffer b;
michael@0 693
michael@0 694 // Y (Y') plane
michael@0 695 b.mPlanes[0].mData = data;
michael@0 696 b.mPlanes[0].mStride = stride;
michael@0 697 b.mPlanes[0].mHeight = mVideoHeight;
michael@0 698 b.mPlanes[0].mWidth = mVideoWidth;
michael@0 699 b.mPlanes[0].mOffset = 0;
michael@0 700 b.mPlanes[0].mSkip = 0;
michael@0 701
michael@0 702 // The V and U planes are stored 16-row-aligned, so we need to add padding
michael@0 703 // to the row heights to ensure the Y'CbCr planes are referenced properly.
michael@0 704 uint32_t padding = 0;
michael@0 705 if (mVideoHeight % 16 != 0) {
michael@0 706 padding = 16 - (mVideoHeight % 16);
michael@0 707 }
michael@0 708 uint32_t y_size = stride * (mVideoHeight + padding);
michael@0 709 uint32_t v_size = stride * (mVideoHeight + padding) / 4;
michael@0 710 uint32_t halfStride = (stride + 1) / 2;
michael@0 711 uint32_t halfHeight = (mVideoHeight + 1) / 2;
michael@0 712 uint32_t halfWidth = (mVideoWidth + 1) / 2;
michael@0 713
michael@0 714 // U plane (Cb)
michael@0 715 b.mPlanes[1].mData = data + y_size + v_size;
michael@0 716 b.mPlanes[1].mStride = halfStride;
michael@0 717 b.mPlanes[1].mHeight = halfHeight;
michael@0 718 b.mPlanes[1].mWidth = halfWidth;
michael@0 719 b.mPlanes[1].mOffset = 0;
michael@0 720 b.mPlanes[1].mSkip = 0;
michael@0 721
michael@0 722 // V plane (Cr)
michael@0 723 b.mPlanes[2].mData = data + y_size;
michael@0 724 b.mPlanes[2].mStride = halfStride;
michael@0 725 b.mPlanes[2].mHeight = halfHeight;
michael@0 726 b.mPlanes[2].mWidth = halfWidth;
michael@0 727 b.mPlanes[2].mOffset = 0;
michael@0 728 b.mPlanes[2].mSkip = 0;
michael@0 729
michael@0 730 VideoData *v = VideoData::Create(mInfo.mVideo,
michael@0 731 mDecoder->GetImageContainer(),
michael@0 732 aOffsetBytes,
michael@0 733 aTimestampUsecs,
michael@0 734 aDurationUsecs,
michael@0 735 b,
michael@0 736 false,
michael@0 737 -1,
michael@0 738 ToIntRect(mPictureRegion));
michael@0 739 if (twoDBuffer) {
michael@0 740 twoDBuffer->Unlock2D();
michael@0 741 } else {
michael@0 742 buffer->Unlock();
michael@0 743 }
michael@0 744
michael@0 745 *aOutVideoData = v;
michael@0 746
michael@0 747 return S_OK;
michael@0 748 }
michael@0 749
michael@0 750 HRESULT
michael@0 751 WMFReader::CreateD3DVideoFrame(IMFSample* aSample,
michael@0 752 int64_t aTimestampUsecs,
michael@0 753 int64_t aDurationUsecs,
michael@0 754 int64_t aOffsetBytes,
michael@0 755 VideoData** aOutVideoData)
michael@0 756 {
michael@0 757 NS_ENSURE_TRUE(aSample, E_POINTER);
michael@0 758 NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
michael@0 759 NS_ENSURE_TRUE(mDXVA2Manager, E_ABORT);
michael@0 760 NS_ENSURE_TRUE(mUseHwAccel, E_ABORT);
michael@0 761
michael@0 762 *aOutVideoData = nullptr;
michael@0 763 HRESULT hr;
michael@0 764
michael@0 765 nsRefPtr<Image> image;
michael@0 766 hr = mDXVA2Manager->CopyToImage(aSample,
michael@0 767 mPictureRegion,
michael@0 768 mDecoder->GetImageContainer(),
michael@0 769 getter_AddRefs(image));
michael@0 770 NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
michael@0 771 NS_ENSURE_TRUE(image, E_FAIL);
michael@0 772
michael@0 773 VideoData *v = VideoData::CreateFromImage(mInfo.mVideo,
michael@0 774 mDecoder->GetImageContainer(),
michael@0 775 aOffsetBytes,
michael@0 776 aTimestampUsecs,
michael@0 777 aDurationUsecs,
michael@0 778 image.forget(),
michael@0 779 false,
michael@0 780 -1,
michael@0 781 ToIntRect(mPictureRegion));
michael@0 782
michael@0 783 NS_ENSURE_TRUE(v, E_FAIL);
michael@0 784 *aOutVideoData = v;
michael@0 785
michael@0 786 return S_OK;
michael@0 787 }
michael@0 788
michael@0 789 bool
michael@0 790 WMFReader::DecodeVideoFrame(bool &aKeyframeSkip,
michael@0 791 int64_t aTimeThreshold)
michael@0 792 {
michael@0 793 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
michael@0 794
michael@0 795 // Record number of frames decoded and parsed. Automatically update the
michael@0 796 // stats counters using the AutoNotifyDecoded stack-based class.
michael@0 797 uint32_t parsed = 0, decoded = 0;
michael@0 798 AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
michael@0 799
michael@0 800 HRESULT hr;
michael@0 801
michael@0 802 hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
michael@0 803 0, // control flags
michael@0 804 0, // read stream index
michael@0 805 nullptr,
michael@0 806 nullptr,
michael@0 807 nullptr);
michael@0 808 if (FAILED(hr)) {
michael@0 809 DECODER_LOG("WMFReader::DecodeVideoData() ReadSample failed with hr=0x%x", hr);
michael@0 810 return false;
michael@0 811 }
michael@0 812
michael@0 813 DWORD flags = 0;
michael@0 814 LONGLONG timestampHns = 0;
michael@0 815 RefPtr<IMFSample> sample;
michael@0 816 hr = mSourceReaderCallback->Wait(&flags, &timestampHns, byRef(sample));
michael@0 817
michael@0 818 if (flags & MF_SOURCE_READERF_ERROR) {
michael@0 819 NS_WARNING("WMFReader: Catastrophic failure reading video sample");
michael@0 820 // Future ReadSample() calls will fail, so give up and report end of stream.
michael@0 821 return false;
michael@0 822 }
michael@0 823
michael@0 824 if (FAILED(hr)) {
michael@0 825 // Unknown failure, ask caller to try again?
michael@0 826 return true;
michael@0 827 }
michael@0 828
michael@0 829 if (!sample) {
michael@0 830 if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {
michael@0 831 DECODER_LOG("WMFReader; Null sample after video decode, at end of stream");
michael@0 832 return false;
michael@0 833 }
michael@0 834 DECODER_LOG("WMFReader; Null sample after video decode. Maybe insufficient data...");
michael@0 835 return true;
michael@0 836 }
michael@0 837
michael@0 838 if ((flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)) {
michael@0 839 DECODER_LOG("WMFReader: Video media type changed!");
michael@0 840 RefPtr<IMFMediaType> mediaType;
michael@0 841 hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
michael@0 842 byRef(mediaType));
michael@0 843 if (FAILED(hr) ||
michael@0 844 FAILED(ConfigureVideoFrameGeometry(mediaType))) {
michael@0 845 NS_WARNING("Failed to reconfigure video media type");
michael@0 846 return false;
michael@0 847 }
michael@0 848 }
michael@0 849
michael@0 850 int64_t timestamp = HNsToUsecs(timestampHns);
michael@0 851 if (timestamp < aTimeThreshold) {
michael@0 852 return true;
michael@0 853 }
michael@0 854 int64_t offset = mDecoder->GetResource()->Tell();
michael@0 855 int64_t duration = GetSampleDuration(sample);
michael@0 856
michael@0 857 VideoData* v = nullptr;
michael@0 858 if (mUseHwAccel) {
michael@0 859 hr = CreateD3DVideoFrame(sample, timestamp, duration, offset, &v);
michael@0 860 } else {
michael@0 861 hr = CreateBasicVideoFrame(sample, timestamp, duration, offset, &v);
michael@0 862 }
michael@0 863 NS_ENSURE_TRUE(SUCCEEDED(hr) && v, false);
michael@0 864
michael@0 865 parsed++;
michael@0 866 decoded++;
michael@0 867 mVideoQueue.Push(v);
michael@0 868
michael@0 869 #ifdef LOG_SAMPLE_DECODE
michael@0 870 DECODER_LOG("Decoded video sample timestamp=%lld duration=%lld stride=%d height=%u flags=%u",
michael@0 871 timestamp, duration, mVideoStride, mVideoHeight, flags);
michael@0 872 #endif
michael@0 873
michael@0 874 if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {
michael@0 875 // End of stream.
michael@0 876 DECODER_LOG("End of video stream");
michael@0 877 return false;
michael@0 878 }
michael@0 879
michael@0 880 return true;
michael@0 881 }
michael@0 882
michael@0 883 nsresult
michael@0 884 WMFReader::Seek(int64_t aTargetUs,
michael@0 885 int64_t aStartTime,
michael@0 886 int64_t aEndTime,
michael@0 887 int64_t aCurrentTime)
michael@0 888 {
michael@0 889 DECODER_LOG("WMFReader::Seek() %lld", aTargetUs);
michael@0 890
michael@0 891 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
michael@0 892 #ifdef DEBUG
michael@0 893 bool canSeek = false;
michael@0 894 GetSourceReaderCanSeek(mSourceReader, canSeek);
michael@0 895 NS_ASSERTION(canSeek, "WMFReader::Seek() should only be called if we can seek!");
michael@0 896 #endif
michael@0 897
michael@0 898 nsresult rv = ResetDecode();
michael@0 899 NS_ENSURE_SUCCESS(rv, rv);
michael@0 900
michael@0 901 // Mark that we must recapture the audio frame count from the next sample.
michael@0 902 // WMF doesn't set a discontinuity marker when we seek to time 0, so we
michael@0 903 // must remember to recapture the audio frame offset and reset the frame
michael@0 904 // sum on the next audio packet we decode.
michael@0 905 mMustRecaptureAudioPosition = true;
michael@0 906
michael@0 907 AutoPropVar var;
michael@0 908 HRESULT hr = InitPropVariantFromInt64(UsecsToHNs(aTargetUs), &var);
michael@0 909 NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
michael@0 910
michael@0 911 hr = mSourceReader->SetCurrentPosition(GUID_NULL, var);
michael@0 912 NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
michael@0 913
michael@0 914 return NS_OK;
michael@0 915 }
michael@0 916
michael@0 917 } // namespace mozilla
