/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef MEDIAENGINE_H_
#define MEDIAENGINE_H_

#include "mozilla/RefPtr.h"
#include "nsIDOMFile.h"
#include "DOMMediaStream.h"
#include "MediaStreamGraph.h"

namespace mozilla {

class VideoTrackConstraintsN;
class AudioTrackConstraintsN;

/**
 * Abstract interface for managing audio and video devices. Each platform
 * must implement a concrete class that maps these classes and methods to
 * the appropriate backend. For example, on desktop platforms they correspond
 * to the equivalent webrtc (GIPS) calls, and on B2G they map to a Gonk
 * interface.
 */
class MediaEngineVideoSource;
class MediaEngineAudioSource;
class MediaEnginePrefs;

enum MediaEngineState {
  kAllocated,
  kStarted,
  kStopped,
  kReleased
};

// We only support 1 audio and 1 video track for now.
enum {
  kVideoTrack = 1,
  kAudioTrack = 2
};

class MediaEngine
{
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEngine)

  static const int DEFAULT_VIDEO_FPS = 30;
  static const int DEFAULT_VIDEO_MIN_FPS = 10;
  static const int DEFAULT_43_VIDEO_WIDTH = 640;
  static const int DEFAULT_43_VIDEO_HEIGHT = 480;
  static const int DEFAULT_169_VIDEO_WIDTH = 1280;
  static const int DEFAULT_169_VIDEO_HEIGHT = 720;
  static const int DEFAULT_AUDIO_TIMER_MS = 10;

  /* Populate the given nsTArray with the available video sources. Devices
   * that are currently unavailable are included as well. */
  virtual void EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >*) = 0;

  /* Populate the given nsTArray with the available audio sources. Devices
   * that are currently unavailable are included as well. */
  virtual void EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >*) = 0;

protected:
  virtual ~MediaEngine() {}
};
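/*
 * Illustrative sketch (not part of the interface): a consumer holding a
 * concrete backend could enumerate capture devices roughly like this. The
 * backend class name MediaEngineFoo and the local variable names are
 * hypothetical; nsTArray and nsRefPtr come from the usual XPCOM headers.
 *
 *   nsRefPtr<MediaEngine> engine = new MediaEngineFoo();
 *   nsTArray<nsRefPtr<MediaEngineVideoSource> > videoSources;
 *   engine->EnumerateVideoDevices(&videoSources);
 *
 *   for (uint32_t i = 0; i < videoSources.Length(); ++i) {
 *     nsString deviceName;
 *     videoSources[i]->GetName(deviceName);
 *     // Sources that are already allocated or started report
 *     // !IsAvailable(); see MediaEngineSource below.
 *   }
 */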
/**
 * Common abstract base class for audio and video sources.
 */
class MediaEngineSource : public nsISupports
{
public:
  virtual ~MediaEngineSource() {}

  /* Populate the human-readable name of this device in the nsAString */
  virtual void GetName(nsAString&) = 0;

  /* Populate the UUID of this device in the nsAString */
  virtual void GetUUID(nsAString&) = 0;

  /* Release the device back to the system. */
  virtual nsresult Deallocate() = 0;

  /* Start the device and add the track to the provided SourceMediaStream,
   * with the provided TrackID. You may start appending data to the track
   * immediately afterwards. */
  virtual nsresult Start(SourceMediaStream*, TrackID) = 0;

  /* Take a snapshot from this source. For video this is a single image, and
   * for audio it is a snippet lasting aDuration milliseconds. The duration
   * argument is ignored for a MediaEngineVideoSource.
   */
  virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile) = 0;

  /* Called when the stream wants more data */
  virtual void NotifyPull(MediaStreamGraph* aGraph,
                          SourceMediaStream* aSource,
                          TrackID aId,
                          StreamTime aDesiredTime,
                          TrackTicks& aLastEndTime) = 0;

  /* Stop the device and release the corresponding MediaStream */
  virtual nsresult Stop(SourceMediaStream* aSource, TrackID aID) = 0;

  /* Change device configuration. */
  virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                          bool aAgcOn, uint32_t aAGC,
                          bool aNoiseOn, uint32_t aNoise,
                          int32_t aPlayoutDelay) = 0;

  /* Returns true if a source represents a fake capture device and
   * false otherwise.
   */
  virtual bool IsFake() = 0;

  /* Returns false if the device is currently allocated or started. */
  bool IsAvailable() {
    return mState != kAllocated && mState != kStarted;
  }

  /* It is an error to call Start() before an Allocate(), and Stop() before
   * a Start(). Only Allocate() may be called after a Deallocate(). */

protected:
  MediaEngineState mState;
};

/**
 * Video source and friends.
 */
class MediaEnginePrefs {
public:
  int32_t mWidth;
  int32_t mHeight;
  int32_t mFPS;
  int32_t mMinFPS;

  // mWidth and/or mHeight may be zero (= adaptive default), so use the
  // accessor functions below.

  int32_t GetWidth(bool aHD = false) const {
    return mWidth ? mWidth : (mHeight ?
        (mHeight * GetDefWidth(aHD)) / GetDefHeight(aHD) :
        GetDefWidth(aHD));
  }

  int32_t GetHeight(bool aHD = false) const {
    return mHeight ? mHeight : (mWidth ?
        (mWidth * GetDefHeight(aHD)) / GetDefWidth(aHD) :
        GetDefHeight(aHD));
  }

private:
  static int32_t GetDefWidth(bool aHD = false) {
    return aHD ? MediaEngine::DEFAULT_169_VIDEO_WIDTH :
                 MediaEngine::DEFAULT_43_VIDEO_WIDTH;
  }

  static int32_t GetDefHeight(bool aHD = false) {
    return aHD ? MediaEngine::DEFAULT_169_VIDEO_HEIGHT :
                 MediaEngine::DEFAULT_43_VIDEO_HEIGHT;
  }
};

class MediaEngineVideoSource : public MediaEngineSource
{
public:
  virtual ~MediaEngineVideoSource() {}

  /* This call reserves but does not start the device. */
  virtual nsresult Allocate(const VideoTrackConstraintsN& aConstraints,
                            const MediaEnginePrefs& aPrefs) = 0;
};
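/*
 * Illustrative sketch (not part of the interface): the expected lifecycle of
 * a source, following the state machine documented in MediaEngineSource
 * (Allocate -> Start -> Stop -> Deallocate). The names aConstraints, aPrefs
 * and aStream are hypothetical placeholders for the caller's constraints,
 * preferences and an existing SourceMediaStream:
 *
 *   nsRefPtr<MediaEngineVideoSource> source = videoSources[0];
 *   nsresult rv = source->Allocate(aConstraints, aPrefs);  // -> kAllocated
 *   if (NS_SUCCEEDED(rv)) {
 *     rv = source->Start(aStream, kVideoTrack);            // -> kStarted
 *     // The MediaStreamGraph now calls NotifyPull() whenever the stream
 *     // wants more data.
 *     source->Stop(aStream, kVideoTrack);                  // -> kStopped
 *     source->Deallocate();                                // -> kReleased
 *   }
 *
 * Note on MediaEnginePrefs: when only one dimension is set, GetWidth() and
 * GetHeight() scale the other to the default aspect ratio. For example, with
 * mWidth == 0 and mHeight == 600, GetWidth() returns (600 * 640) / 480 == 800.
 */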
/**
 * Audio source and friends.
 */
class MediaEngineAudioSource : public MediaEngineSource
{
public:
  virtual ~MediaEngineAudioSource() {}

  /* This call reserves but does not start the device. */
  virtual nsresult Allocate(const AudioTrackConstraintsN& aConstraints,
                            const MediaEnginePrefs& aPrefs) = 0;
};

} // namespace mozilla

#endif /* MEDIAENGINE_H_ */