/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaEngineDefault.h"

#include "nsCOMPtr.h"
#include "nsDOMFile.h"
#include "nsILocalFile.h"
#include "Layers.h"
#include "ImageContainer.h"
#include "ImageTypes.h"
#include "prmem.h"
#include "nsContentUtils.h"

#include "nsIFilePicker.h"
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"

#ifdef MOZ_WIDGET_ANDROID
#include "AndroidBridge.h"
#include "nsISupportsUtils.h"
#endif

#if defined(MOZ_WEBRTC) && defined(MOZ_WEBRTC_SIGNALING)
#include "YuvStamper.h"
#endif

#define VIDEO_RATE USECS_PER_S
#define AUDIO_RATE 16000
#define AUDIO_FRAME_LENGTH ((AUDIO_RATE * MediaEngine::DEFAULT_AUDIO_TIMER_MS) / 1000)

namespace mozilla {

using namespace mozilla::gfx;

NS_IMPL_ISUPPORTS(MediaEngineDefaultVideoSource, nsITimerCallback)
/**
 * Default video source.
 */

MediaEngineDefaultVideoSource::MediaEngineDefaultVideoSource()
  : mTimer(nullptr), mMonitor("Fake video")
{
  mImageContainer = layers::LayerManager::CreateImageContainer();
  mState = kReleased;
}

MediaEngineDefaultVideoSource::~MediaEngineDefaultVideoSource()
{}

void
MediaEngineDefaultVideoSource::GetName(nsAString& aName)
{
  aName.Assign(NS_LITERAL_STRING("Default Video Device"));
  return;
}

void
MediaEngineDefaultVideoSource::GetUUID(nsAString& aUUID)
{
  aUUID.Assign(NS_LITERAL_STRING("1041FCBD-3F12-4F7B-9E9B-1EC556DD5676"));
  return;
}

nsresult
MediaEngineDefaultVideoSource::Allocate(const VideoTrackConstraintsN &aConstraints,
                                        const MediaEnginePrefs &aPrefs)
{
  if (mState != kReleased) {
    return NS_ERROR_FAILURE;
  }

  mOpts = aPrefs;
  mOpts.mWidth = mOpts.mWidth ? mOpts.mWidth : MediaEngine::DEFAULT_43_VIDEO_WIDTH;
  mOpts.mHeight = mOpts.mHeight ? mOpts.mHeight : MediaEngine::DEFAULT_43_VIDEO_HEIGHT;
  mState = kAllocated;
  return NS_OK;
}
nsresult
MediaEngineDefaultVideoSource::Deallocate()
{
  if (mState != kStopped && mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }
  mState = kReleased;
  return NS_OK;
}

static void AllocateSolidColorFrame(layers::PlanarYCbCrData& aData,
                                    int aWidth, int aHeight,
                                    int aY, int aCb, int aCr)
{
  MOZ_ASSERT(!(aWidth & 1));
  MOZ_ASSERT(!(aHeight & 1));
  // Allocate a single I420 frame filled with a solid color
  int yLen = aWidth * aHeight;
  int cbLen = yLen >> 2;
  int crLen = cbLen;
  uint8_t* frame = (uint8_t*) PR_Malloc(yLen + cbLen + crLen);
  memset(frame, aY, yLen);
  memset(frame + yLen, aCb, cbLen);
  memset(frame + yLen + cbLen, aCr, crLen);

  aData.mYChannel = frame;
  aData.mYSize = IntSize(aWidth, aHeight);
  aData.mYStride = aWidth;
  aData.mCbCrStride = aWidth >> 1;
  aData.mCbChannel = frame + yLen;
  aData.mCrChannel = aData.mCbChannel + cbLen;
  aData.mCbCrSize = IntSize(aWidth >> 1, aHeight >> 1);
  aData.mPicX = 0;
  aData.mPicY = 0;
  aData.mPicSize = IntSize(aWidth, aHeight);
  aData.mStereoMode = StereoMode::MONO;
}

static void ReleaseFrame(layers::PlanarYCbCrData& aData)
{
  PR_Free(aData.mYChannel);
}

nsresult
MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  aStream->AddTrack(aID, VIDEO_RATE, 0, new VideoSegment());
  aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);

  // Remember TrackID so we can end it later
  mTrackID = aID;

  // Start timer for subsequent frames
#if defined(MOZ_WIDGET_GONK) && defined(DEBUG)
  // B2G emulator debug builds are very slow and have problems keeping up with realtime input
  mTimer->InitWithCallback(this, (1000 / mOpts.mFPS) * 10, nsITimer::TYPE_REPEATING_SLACK);
#else
  mTimer->InitWithCallback(this, 1000 / mOpts.mFPS, nsITimer::TYPE_REPEATING_SLACK);
#endif
  mState = kStarted;

  return NS_OK;
}

nsresult
MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  if (mState != kStarted) {
    return NS_ERROR_FAILURE;
  }
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mTimer->Cancel();
  mTimer = nullptr;

  aSource->EndTrack(aID);
  aSource->Finish();

  mState = kStopped;
  return NS_OK;
}

nsresult
MediaEngineDefaultVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
{
  *aFile = nullptr;

#ifndef MOZ_WIDGET_ANDROID
  return NS_ERROR_NOT_IMPLEMENTED;
#else
  nsAutoString filePath;
  nsCOMPtr<nsIFilePicker> filePicker = do_CreateInstance("@mozilla.org/filepicker;1");
  if (!filePicker)
    return NS_ERROR_FAILURE;

  nsXPIDLString title;
  nsContentUtils::GetLocalizedString(nsContentUtils::eFORMS_PROPERTIES, "Browse", title);
  int16_t mode = static_cast<int16_t>(nsIFilePicker::modeOpen);

  nsresult rv = filePicker->Init(nullptr, title, mode);
  NS_ENSURE_SUCCESS(rv, rv);
  filePicker->AppendFilters(nsIFilePicker::filterImages);

  // XXX - This API should be made async
  PRInt16 dialogReturn;
  rv = filePicker->Show(&dialogReturn);
  NS_ENSURE_SUCCESS(rv, rv);
  if (dialogReturn == nsIFilePicker::returnCancel) {
    *aFile = nullptr;
    return NS_OK;
  }

  nsCOMPtr<nsIFile> localFile;
  filePicker->GetFile(getter_AddRefs(localFile));

  if (!localFile) {
    *aFile = nullptr;
    return NS_OK;
  }

  nsCOMPtr<nsIDOMFile> domFile = new nsDOMFileFile(localFile);
  domFile.forget(aFile);
  return NS_OK;
#endif
}
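// Timer callback for the fake video source: walks mCb/mCr around the edge of
// the [16, 240] range so successive frames cycle through colors, builds a new
// solid-color frame, and publishes it (under mMonitor) for NotifyPull() to
// deliver.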
NS_IMETHODIMP
MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
{
  // Update the target color
  if (mCr <= 16) {
    if (mCb < 240) {
      mCb++;
    } else {
      mCr++;
    }
  } else if (mCb >= 240) {
    if (mCr < 240) {
      mCr++;
    } else {
      mCb--;
    }
  } else if (mCr >= 240) {
    if (mCb > 16) {
      mCb--;
    } else {
      mCr--;
    }
  } else {
    mCr--;
  }

  // Allocate a single solid color image
  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
  nsRefPtr<layers::PlanarYCbCrImage> ycbcr_image =
    static_cast<layers::PlanarYCbCrImage*>(image.get());
  layers::PlanarYCbCrData data;
  AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);

#if defined(MOZ_WEBRTC) && defined(MOZ_WEBRTC_SIGNALING)
  uint64_t timestamp = PR_Now();
  YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
                     data.mYChannel,
                     reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp),
                     0, 0);
#endif

  ycbcr_image->SetData(data);
  // SetData copies the data, so we can free the frame
  ReleaseFrame(data);

  MonitorAutoLock lock(mMonitor);

  // implicitly releases last image
  mImage = ycbcr_image.forget();

  return NS_OK;
}
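// Pull-based delivery: the MediaStreamGraph calls this when it needs more
// video; we append the most recently published frame (or a nullptr frame) for
// however many ticks have elapsed since the last successful append.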
void
MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream *aSource,
                                          TrackID aID,
                                          StreamTime aDesiredTime,
                                          TrackTicks &aLastEndTime)
{
  // AddTrack takes ownership of segment
  VideoSegment segment;
  MonitorAutoLock lock(mMonitor);
  if (mState != kStarted) {
    return;
  }

  // Note: we're not giving up mImage here
  nsRefPtr<layers::Image> image = mImage;
  TrackTicks target = TimeToTicksRoundUp(USECS_PER_S, aDesiredTime);
  TrackTicks delta = target - aLastEndTime;

  if (delta > 0) {
    // nullptr images are allowed
    IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
    segment.AppendFrame(image.forget(), delta, size);
    // This can fail if either a) we haven't added the track yet, or b)
    // we've removed or finished the track.
    if (aSource->AppendToTrack(aID, &segment)) {
      aLastEndTime = target;
    }
  }
}

// Generates a 1 kHz sine wave; the buffer holds exactly one period.
class SineWaveGenerator
{
public:
  static const int bytesPerSample = 2;
  static const int millisecondsPerSecond = 1000;
  static const int frequency = 1000;

  SineWaveGenerator(int aSampleRate) :
    mTotalLength(aSampleRate / frequency),
    mReadLength(0) {
    MOZ_ASSERT(mTotalLength * frequency == aSampleRate);
    mAudioBuffer = new int16_t[mTotalLength];
    for (int i = 0; i < mTotalLength; i++) {
      // Set the volume to -20 dB: 32768.0 * 10^(-20/20) = 3276.8
      mAudioBuffer[i] = (3276.8f * sin(2 * M_PI * i / mTotalLength));
    }
  }

  // NOTE: only safely called from a single thread (MSG callback)
  void generate(int16_t* aBuffer, int16_t aLengthInSamples) {
    int16_t remaining = aLengthInSamples;

    while (remaining) {
      int16_t processSamples = 0;

      if (mTotalLength - mReadLength >= remaining) {
        processSamples = remaining;
      } else {
        processSamples = mTotalLength - mReadLength;
      }
      memcpy(aBuffer, mAudioBuffer + mReadLength, processSamples * bytesPerSample);
      aBuffer += processSamples;
      mReadLength += processSamples;
      remaining -= processSamples;
      if (mReadLength == mTotalLength) {
        mReadLength = 0;
      }
    }
  }

private:
  nsAutoArrayPtr<int16_t> mAudioBuffer;
  int16_t mTotalLength;
  int16_t mReadLength;
};
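// Usage sketch (illustrative only, mirroring the audio Notify() callback
// below, and assuming AUDIO_RATE / AUDIO_FRAME_LENGTH as defined at the top
// of this file):
//
//   SineWaveGenerator gen(AUDIO_RATE);
//   int16_t samples[AUDIO_FRAME_LENGTH];
//   gen.generate(samples, AUDIO_FRAME_LENGTH); // one timer interval of 1 kHz tone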
/**
 * Default audio source.
 */
NS_IMPL_ISUPPORTS(MediaEngineDefaultAudioSource, nsITimerCallback)

MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
  : mTimer(nullptr)
{
  mState = kReleased;
}

MediaEngineDefaultAudioSource::~MediaEngineDefaultAudioSource()
{}

void
MediaEngineDefaultAudioSource::GetName(nsAString& aName)
{
  aName.Assign(NS_LITERAL_STRING("Default Audio Device"));
  return;
}

void
MediaEngineDefaultAudioSource::GetUUID(nsAString& aUUID)
{
  aUUID.Assign(NS_LITERAL_STRING("B7CBD7C1-53EF-42F9-8353-73F61C70C092"));
  return;
}

nsresult
MediaEngineDefaultAudioSource::Allocate(const AudioTrackConstraintsN &aConstraints,
                                        const MediaEnginePrefs &aPrefs)
{
  if (mState != kReleased) {
    return NS_ERROR_FAILURE;
  }

  mState = kAllocated;
  // Generate a 1 kHz sine wave
  mSineGenerator = new SineWaveGenerator(AUDIO_RATE);
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Deallocate()
{
  if (mState != kStopped && mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }
  mState = kReleased;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
{
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mSource = aStream;

  // AddTrack will take ownership of segment
  AudioSegment* segment = new AudioSegment();
  mSource->AddTrack(aID, AUDIO_RATE, 0, segment);

  // We aren't going to add any more tracks
  mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);

  // Remember TrackID so we can finish later
  mTrackID = aID;

  // One audio frame per timer interval
#if defined(MOZ_WIDGET_GONK) && defined(DEBUG)
  // B2G emulator debug builds are very slow and have problems dealing with realtime audio input
  mTimer->InitWithCallback(this, MediaEngine::DEFAULT_AUDIO_TIMER_MS * 10,
                           nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP);
#else
  mTimer->InitWithCallback(this, MediaEngine::DEFAULT_AUDIO_TIMER_MS,
                           nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP);
#endif
  mState = kStarted;

  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  if (mState != kStarted) {
    return NS_ERROR_FAILURE;
  }
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mTimer->Cancel();
  mTimer = nullptr;

  aSource->EndTrack(aID);
  aSource->Finish();

  mState = kStopped;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
{
  return NS_ERROR_NOT_IMPLEMENTED;
}
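// Timer callback for the fake audio source: fills a shared buffer with
// AUDIO_FRAME_LENGTH samples of the 1 kHz tone and appends them to the track
// as a single mono chunk.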
NS_IMETHODIMP
MediaEngineDefaultAudioSource::Notify(nsITimer* aTimer)
{
  AudioSegment segment;
  nsRefPtr<SharedBuffer> buffer = SharedBuffer::Create(AUDIO_FRAME_LENGTH * sizeof(int16_t));
  int16_t* dest = static_cast<int16_t*>(buffer->Data());

  mSineGenerator->generate(dest, AUDIO_FRAME_LENGTH);
  nsAutoTArray<const int16_t*, 1> channels;
  channels.AppendElement(dest);
  segment.AppendFrames(buffer.forget(), channels, AUDIO_FRAME_LENGTH);
  mSource->AppendToTrack(mTrackID, &segment);

  return NS_OK;
}

void
MediaEngineDefault::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) {
  MutexAutoLock lock(mMutex);

  // We once had code here to find a VideoSource with the same settings and re-use that.
  // This is no longer possible since the resolution is now set in Allocate().

  nsRefPtr<MediaEngineDefaultVideoSource> newSource = new MediaEngineDefaultVideoSource();
  mVSources.AppendElement(newSource);
  aVSources->AppendElement(newSource);

  return;
}

void
MediaEngineDefault::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) {
  MutexAutoLock lock(mMutex);
  int32_t len = mASources.Length();

  for (int32_t i = 0; i < len; i++) {
    nsRefPtr<MediaEngineDefaultAudioSource> source = mASources.ElementAt(i);
    if (source->IsAvailable()) {
      aASources->AppendElement(source);
    }
  }

  // All streams are currently busy; just make a new one.
  if (aASources->Length() == 0) {
    nsRefPtr<MediaEngineDefaultAudioSource> newSource =
      new MediaEngineDefaultAudioSource();
    mASources.AppendElement(newSource);
    aASources->AppendElement(newSource);
  }
  return;
}

} // namespace mozilla