/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaEngineDefault.h"

#include "nsCOMPtr.h"
#include "nsDOMFile.h"
#include "nsILocalFile.h"
#include "Layers.h"
#include "ImageContainer.h"
#include "ImageTypes.h"
#include "prmem.h"
#include "nsContentUtils.h"

#include "nsIFilePicker.h"
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"

#ifdef MOZ_WIDGET_ANDROID
#include "AndroidBridge.h"
#include "nsISupportsUtils.h"
#endif

#if defined(MOZ_WEBRTC) && defined(MOZ_WEBRTC_SIGNALING)
#include "YuvStamper.h"
#endif

#define VIDEO_RATE USECS_PER_S
#define AUDIO_RATE 16000
#define AUDIO_FRAME_LENGTH ((AUDIO_RATE * MediaEngine::DEFAULT_AUDIO_TIMER_MS) / 1000)

namespace mozilla {

using namespace mozilla::gfx;

/**
 * Default video source.
 */

NS_IMPL_ISUPPORTS(MediaEngineDefaultVideoSource, nsITimerCallback)

MediaEngineDefaultVideoSource::MediaEngineDefaultVideoSource()
  : mTimer(nullptr), mMonitor("Fake video")
{
  mImageContainer = layers::LayerManager::CreateImageContainer();
  mState = kReleased;
}

MediaEngineDefaultVideoSource::~MediaEngineDefaultVideoSource()
{}

void
MediaEngineDefaultVideoSource::GetName(nsAString& aName)
{
  aName.Assign(NS_LITERAL_STRING("Default Video Device"));
  return;
}

void
MediaEngineDefaultVideoSource::GetUUID(nsAString& aUUID)
{
  aUUID.Assign(NS_LITERAL_STRING("1041FCBD-3F12-4F7B-9E9B-1EC556DD5676"));
  return;
}

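// Configure the fake capture resolution from prefs; zero width/height values
// fall back to MediaEngine's 4:3 defaults. Only valid from the kReleased state.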
nsresult
MediaEngineDefaultVideoSource::Allocate(const VideoTrackConstraintsN &aConstraints,
                                        const MediaEnginePrefs &aPrefs)
{
  if (mState != kReleased) {
    return NS_ERROR_FAILURE;
  }

  mOpts = aPrefs;
  mOpts.mWidth = mOpts.mWidth ? mOpts.mWidth : MediaEngine::DEFAULT_43_VIDEO_WIDTH;
  mOpts.mHeight = mOpts.mHeight ? mOpts.mHeight : MediaEngine::DEFAULT_43_VIDEO_HEIGHT;
  mState = kAllocated;
  return NS_OK;
}

nsresult
MediaEngineDefaultVideoSource::Deallocate()
{
  if (mState != kStopped && mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }
  mState = kReleased;
  return NS_OK;
}

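// Fill aData with a solid-color I420 (4:2:0) frame: one PR_Malloc'd buffer
// holding a full-size Y plane followed by quarter-size Cb and Cr planes.
// ReleaseFrame() frees it through the Y channel pointer.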
static void AllocateSolidColorFrame(layers::PlanarYCbCrData& aData,
                                    int aWidth, int aHeight,
                                    int aY, int aCb, int aCr)
{
  MOZ_ASSERT(!(aWidth&1));
  MOZ_ASSERT(!(aHeight&1));
  // Allocate a single frame with a solid color
  int yLen = aWidth*aHeight;
  int cbLen = yLen>>2;
  int crLen = cbLen;
  uint8_t* frame = (uint8_t*) PR_Malloc(yLen+cbLen+crLen);
  memset(frame, aY, yLen);
  memset(frame+yLen, aCb, cbLen);
  memset(frame+yLen+cbLen, aCr, crLen);

  aData.mYChannel = frame;
  aData.mYSize = IntSize(aWidth, aHeight);
  aData.mYStride = aWidth;
  aData.mCbCrStride = aWidth>>1;
  aData.mCbChannel = frame + yLen;
  aData.mCrChannel = aData.mCbChannel + cbLen;
  aData.mCbCrSize = IntSize(aWidth>>1, aHeight>>1);
  aData.mPicX = 0;
  aData.mPicY = 0;
  aData.mPicSize = IntSize(aWidth, aHeight);
  aData.mStereoMode = StereoMode::MONO;
}

static void ReleaseFrame(layers::PlanarYCbCrData& aData)
{
  PR_Free(aData.mYChannel);
}

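// Add the video track to the stream and start a repeating timer; each Notify()
// tick below produces one new frame, at roughly mOpts.mFPS frames per second.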
nsresult
MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  aStream->AddTrack(aID, VIDEO_RATE, 0, new VideoSegment());
  aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);

  // Remember TrackID so we can end it later
  mTrackID = aID;

  // Start timer for subsequent frames
#if defined(MOZ_WIDGET_GONK) && defined(DEBUG)
  // B2G emulator debug is very, very slow and has problems dealing with realtime audio inputs
  mTimer->InitWithCallback(this, (1000 / mOpts.mFPS)*10, nsITimer::TYPE_REPEATING_SLACK);
#else
  mTimer->InitWithCallback(this, 1000 / mOpts.mFPS, nsITimer::TYPE_REPEATING_SLACK);
#endif
  mState = kStarted;

  return NS_OK;
}

nsresult
MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  if (mState != kStarted) {
    return NS_ERROR_FAILURE;
  }
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mTimer->Cancel();
  mTimer = nullptr;

  aSource->EndTrack(aID);
  aSource->Finish();

  mState = kStopped;
  return NS_OK;
}

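// Fake "snapshot": not implemented on desktop. On Android, prompt the user to
// pick an image file and return it wrapped as an nsIDOMFile.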
nsresult
MediaEngineDefaultVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
{
  *aFile = nullptr;

#ifndef MOZ_WIDGET_ANDROID
  return NS_ERROR_NOT_IMPLEMENTED;
#else
  nsAutoString filePath;
  nsCOMPtr<nsIFilePicker> filePicker = do_CreateInstance("@mozilla.org/filepicker;1");
  if (!filePicker)
    return NS_ERROR_FAILURE;

  nsXPIDLString title;
  nsContentUtils::GetLocalizedString(nsContentUtils::eFORMS_PROPERTIES, "Browse", title);
  int16_t mode = static_cast<int16_t>(nsIFilePicker::modeOpen);

  nsresult rv = filePicker->Init(nullptr, title, mode);
  NS_ENSURE_SUCCESS(rv, rv);
  filePicker->AppendFilters(nsIFilePicker::filterImages);

  // XXX - This API should be made async
  int16_t dialogReturn;
  rv = filePicker->Show(&dialogReturn);
  NS_ENSURE_SUCCESS(rv, rv);
  if (dialogReturn == nsIFilePicker::returnCancel) {
    *aFile = nullptr;
    return NS_OK;
  }

  nsCOMPtr<nsIFile> localFile;
  filePicker->GetFile(getter_AddRefs(localFile));

  if (!localFile) {
    *aFile = nullptr;
    return NS_OK;
  }

  nsCOMPtr<nsIDOMFile> domFile = new nsDOMFileFile(localFile);
  domFile.forget(aFile);
  return NS_OK;
#endif
}

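// Timer callback: advance the Cb/Cr values one step around a color cycle,
// render a solid frame of that color (with a wall-clock timestamp stamped into
// the Y plane on WebRTC builds), and publish it as mImage for NotifyPull().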
NS_IMETHODIMP
MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
{
  // Update the target color
  if (mCr <= 16) {
    if (mCb < 240) {
      mCb++;
    } else {
      mCr++;
    }
  } else if (mCb >= 240) {
    if (mCr < 240) {
      mCr++;
    } else {
      mCb--;
    }
  } else if (mCr >= 240) {
    if (mCb > 16) {
      mCb--;
    } else {
      mCr--;
    }
  } else {
    mCr--;
  }

  // Allocate a single solid color image
  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
  nsRefPtr<layers::PlanarYCbCrImage> ycbcr_image =
    static_cast<layers::PlanarYCbCrImage*>(image.get());
  layers::PlanarYCbCrData data;
  AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);

#if defined(MOZ_WEBRTC) && defined(MOZ_WEBRTC_SIGNALING)
  uint64_t timestamp = PR_Now();
  YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
                     data.mYChannel,
                     reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp),
                     0, 0);
#endif

  ycbcr_image->SetData(data);
  // SetData copies data, so we can free the frame
  ReleaseFrame(data);

  MonitorAutoLock lock(mMonitor);

  // implicitly releases last image
  mImage = ycbcr_image.forget();

  return NS_OK;
}

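// Pull hook driven by the MediaStreamGraph: append the most recent frame to
// the track for however many ticks have elapsed since the last append.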
void
MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream *aSource,
                                          TrackID aID,
                                          StreamTime aDesiredTime,
                                          TrackTicks &aLastEndTime)
{
  // AddTrack takes ownership of segment
  VideoSegment segment;
  MonitorAutoLock lock(mMonitor);
  if (mState != kStarted) {
    return;
  }

  // Note: we're not giving up mImage here
  nsRefPtr<layers::Image> image = mImage;
  TrackTicks target = TimeToTicksRoundUp(USECS_PER_S, aDesiredTime);
  TrackTicks delta = target - aLastEndTime;

  if (delta > 0) {
    // nullptr images are allowed
    IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
    segment.AppendFrame(image.forget(), delta, size);
    // This can fail if either a) we haven't added the track yet, or b)
    // we've removed or finished the track.
    if (aSource->AppendToTrack(aID, &segment)) {
      aLastEndTime = target;
    }
  }
}

// Generates a 1 kHz sine wave by looping a single precomputed cycle.
class SineWaveGenerator
{
public:
  static const int bytesPerSample = 2;
  static const int millisecondsPerSecond = 1000;
  static const int frequency = 1000;

  SineWaveGenerator(int aSampleRate) :
    mTotalLength(aSampleRate / frequency),
    mReadLength(0) {
    MOZ_ASSERT(mTotalLength * frequency == aSampleRate);
    mAudioBuffer = new int16_t[mTotalLength];
    for (int i = 0; i < mTotalLength; i++) {
      // Set the amplitude to -20 dB: 32768.0 * 10^(-20/20) = 3276.8
      mAudioBuffer[i] = (3276.8f * sin(2 * M_PI * i / mTotalLength));
    }
  }

  // NOTE: only safely called from a single thread (MSG callback)
  void generate(int16_t* aBuffer, int16_t aLengthInSamples) {
    int16_t remaining = aLengthInSamples;

    while (remaining) {
      int16_t processSamples = 0;

      if (mTotalLength - mReadLength >= remaining) {
        processSamples = remaining;
      } else {
        processSamples = mTotalLength - mReadLength;
      }
      memcpy(aBuffer, mAudioBuffer + mReadLength, processSamples * bytesPerSample);
      aBuffer += processSamples;
      mReadLength += processSamples;
      remaining -= processSamples;
      if (mReadLength == mTotalLength) {
        mReadLength = 0;
      }
    }
  }

private:
  nsAutoArrayPtr<int16_t> mAudioBuffer;
  int16_t mTotalLength;
  int16_t mReadLength;
};

/**
 * Default audio source.
 */
NS_IMPL_ISUPPORTS(MediaEngineDefaultAudioSource, nsITimerCallback)

MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
  : mTimer(nullptr)
{
  mState = kReleased;
}

MediaEngineDefaultAudioSource::~MediaEngineDefaultAudioSource()
{}

void
MediaEngineDefaultAudioSource::GetName(nsAString& aName)
{
  aName.Assign(NS_LITERAL_STRING("Default Audio Device"));
  return;
}

void
MediaEngineDefaultAudioSource::GetUUID(nsAString& aUUID)
{
  aUUID.Assign(NS_LITERAL_STRING("B7CBD7C1-53EF-42F9-8353-73F61C70C092"));
  return;
}

nsresult
MediaEngineDefaultAudioSource::Allocate(const AudioTrackConstraintsN &aConstraints,
                                        const MediaEnginePrefs &aPrefs)
{
  if (mState != kReleased) {
    return NS_ERROR_FAILURE;
  }

  mState = kAllocated;
  // Generate a 1 kHz sine wave
  mSineGenerator = new SineWaveGenerator(AUDIO_RATE);
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Deallocate()
{
  if (mState != kStopped && mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }
  mState = kReleased;
  return NS_OK;
}

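// Add the audio track and start a repeating timer; each Notify() tick below
// appends DEFAULT_AUDIO_TIMER_MS worth of sine-wave samples to the track.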
nsresult
MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
{
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mSource = aStream;

  // AddTrack will take ownership of segment
  AudioSegment* segment = new AudioSegment();
  mSource->AddTrack(aID, AUDIO_RATE, 0, segment);

  // We aren't going to add any more tracks
  mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);

  // Remember TrackID so we can finish later
  mTrackID = aID;

  // 1 Audio frame per 10ms
#if defined(MOZ_WIDGET_GONK) && defined(DEBUG)
  // B2G emulator debug is very, very slow and has problems dealing with realtime audio inputs
  mTimer->InitWithCallback(this, MediaEngine::DEFAULT_AUDIO_TIMER_MS*10,
                           nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP);
#else
  mTimer->InitWithCallback(this, MediaEngine::DEFAULT_AUDIO_TIMER_MS,
                           nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP);
#endif
  mState = kStarted;

  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  if (mState != kStarted) {
    return NS_ERROR_FAILURE;
  }
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mTimer->Cancel();
  mTimer = nullptr;

  aSource->EndTrack(aID);
  aSource->Finish();

  mState = kStopped;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
{
  return NS_ERROR_NOT_IMPLEMENTED;
}

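// Timer callback: synthesize AUDIO_FRAME_LENGTH samples of the 1 kHz tone into
// a SharedBuffer and append them to the track as a single mono chunk.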
NS_IMETHODIMP
MediaEngineDefaultAudioSource::Notify(nsITimer* aTimer)
{
  AudioSegment segment;
  nsRefPtr<SharedBuffer> buffer = SharedBuffer::Create(AUDIO_FRAME_LENGTH * sizeof(int16_t));
  int16_t* dest = static_cast<int16_t*>(buffer->Data());

  mSineGenerator->generate(dest, AUDIO_FRAME_LENGTH);
  nsAutoTArray<const int16_t*,1> channels;
  channels.AppendElement(dest);
  segment.AppendFrames(buffer.forget(), channels, AUDIO_FRAME_LENGTH);
  mSource->AppendToTrack(mTrackID, &segment);

  return NS_OK;
}

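// Always hand out a brand-new fake video source; see the note below on why
// existing sources are not re-used.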
void
MediaEngineDefault::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) {
  MutexAutoLock lock(mMutex);

  // We once had code here to find a VideoSource with the same settings and re-use that.
  // This no longer is possible since the resolution is being set in Allocate().

  nsRefPtr<MediaEngineVideoSource> newSource = new MediaEngineDefaultVideoSource();
  mVSources.AppendElement(newSource);
  aVSources->AppendElement(newSource);

  return;
}

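// Hand out any fake audio sources that are not currently in use; if all of
// them are busy, create a new one.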
void
MediaEngineDefault::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) {
  MutexAutoLock lock(mMutex);
  int32_t len = mASources.Length();

  for (int32_t i = 0; i < len; i++) {
    nsRefPtr<MediaEngineAudioSource> source = mASources.ElementAt(i);
    if (source->IsAvailable()) {
      aASources->AppendElement(source);
    }
  }

  // All streams are currently busy, just make a new one.
  if (aASources->Length() == 0) {
    nsRefPtr<MediaEngineAudioSource> newSource =
      new MediaEngineDefaultAudioSource();
    mASources.AppendElement(newSource);
    aASources->AppendElement(newSource);
  }
  return;
}

} // namespace mozilla