/* -*- Mode: c++; c-basic-offset: 2; tab-width: 20; indent-tabs-mode: nil; -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
#include "base/basictypes.h"
#include "AndroidBridge.h"

#include <android/log.h>
#include <stdlib.h>
#include <time.h>

#include "assert.h"
#include "ANPBase.h"
#include "nsIThread.h"
#include "nsThreadUtils.h"
#include "mozilla/Mutex.h"

#define LOG(args...) __android_log_print(ANDROID_LOG_INFO, "GeckoPluginsAudio", ## args)
#define ASSIGN(obj, name) (obj)->name = anp_audio_##name
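
/* The struct and enums below cache JNI method IDs and mirror numeric constants
 * from the Android SDK classes named in the comments (android.media.AudioTrack,
 * android.media.AudioManager and android.media.AudioFormat). The values are
 * passed straight through to the Java AudioTrack constructor, so they must stay
 * in sync with the platform's definitions. */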
|
/* android.media.AudioTrack */
struct AudioTrack {
  jclass    at_class;
  jmethodID constructor;
  jmethodID flush;
  jmethodID pause;
  jmethodID play;
  jmethodID setvol;
  jmethodID stop;
  jmethodID write;
  jmethodID getpos;
  jmethodID getstate;
  jmethodID release;
};

enum AudioTrackMode {
  MODE_STATIC = 0,
  MODE_STREAM = 1
};
|
/* android.media.AudioManager */
enum AudioManagerStream {
  STREAM_VOICE_CALL = 0,
  STREAM_SYSTEM = 1,
  STREAM_RING = 2,
  STREAM_MUSIC = 3,
  STREAM_ALARM = 4,
  STREAM_NOTIFICATION = 5,
  STREAM_DTMF = 8
};

/* android.media.AudioFormat */
enum AudioFormatChannel {
  CHANNEL_OUT_MONO = 4,
  CHANNEL_OUT_STEREO = 12
};

enum AudioFormatEncoding {
  ENCODING_PCM_16BIT = 2,
  ENCODING_PCM_8BIT = 3
};

enum AudioFormatState {
  STATE_UNINITIALIZED = 0,
  STATE_INITIALIZED = 1,
  STATE_NO_STATIC_DATA = 2
};
|
static struct AudioTrack at;
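
/* Resolve and cache the android.media.AudioTrack method IDs and hand back a
 * global reference to the class. Each track stores that reference in at_class;
 * AudioRunnable::Run (and the error paths in anp_audio_newTrack and
 * anp_audio_start) release it with DeleteGlobalRef. */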
|
static jclass
init_jni_bindings(JNIEnv *jenv) {
  jclass jc =
    (jclass)jenv->NewGlobalRef(jenv->FindClass("android/media/AudioTrack"));

  at.constructor = jenv->GetMethodID(jc, "<init>", "(IIIIII)V");
  at.flush       = jenv->GetMethodID(jc, "flush", "()V");
  at.pause       = jenv->GetMethodID(jc, "pause", "()V");
  at.play        = jenv->GetMethodID(jc, "play", "()V");
  at.setvol      = jenv->GetMethodID(jc, "setStereoVolume", "(FF)I");
  at.stop        = jenv->GetMethodID(jc, "stop", "()V");
  at.write       = jenv->GetMethodID(jc, "write", "([BII)I");
  at.getpos      = jenv->GetMethodID(jc, "getPlaybackHeadPosition", "()I");
  at.getstate    = jenv->GetMethodID(jc, "getState", "()I");
  at.release     = jenv->GetMethodID(jc, "release", "()V");

  return jc;
}
|
struct ANPAudioTrack {
  jobject output_unit;
  jclass  at_class;

  unsigned int rate;
  unsigned int channels;
  unsigned int bufferSize;
  unsigned int isStopped;
  unsigned int keepGoing;

  mozilla::Mutex lock;

  void* user;
  ANPAudioCallbackProc proc;
  ANPSampleFormat format;

  ANPAudioTrack() : lock("ANPAudioTrack") { }
};
|
class AudioRunnable : public nsRunnable
{
public:
  NS_DECL_NSIRUNNABLE

  AudioRunnable(ANPAudioTrack* aAudioTrack) {
    mTrack = aAudioTrack;
  }

  ANPAudioTrack* mTrack;
};
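
/* Body of the "Android Audio" thread: repeatedly ask the plugin for PCM data
 * via proc(kMoreData_ANPAudioEvent, ...) while holding the track lock, then
 * write it to the Java AudioTrack until keepGoing is cleared. On exit the
 * thread releases the Java object, drops the JNI references and deletes the
 * ANPAudioTrack it owns. */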
|
NS_IMETHODIMP
AudioRunnable::Run()
{
  PR_SetCurrentThreadName("Android Audio");

  JNIEnv* jenv = GetJNIForThread();

  mozilla::AutoLocalJNIFrame autoFrame(jenv, 2);

  jbyteArray bytearray = jenv->NewByteArray(mTrack->bufferSize);
  if (!bytearray) {
    LOG("AudioRunnable:: Run. Could not create bytearray");
    return NS_ERROR_FAILURE;
  }

  jbyte *byte = jenv->GetByteArrayElements(bytearray, nullptr);
  if (!byte) {
    LOG("AudioRunnable:: Run. Could not get bytearray elements");
    return NS_ERROR_FAILURE;
  }

  ANPAudioBuffer buffer;
  buffer.channelCount = mTrack->channels;
  buffer.format = mTrack->format;
  buffer.bufferData = (void*) byte;

  while (true)
  {
    // Reset the buffer size; the plugin callback shrinks it to the number of
    // bytes it actually produced.
    buffer.size = mTrack->bufferSize;

    {
      mozilla::MutexAutoLock lock(mTrack->lock);

      if (!mTrack->keepGoing)
        break;

      // Get data from the plugin
      mTrack->proc(kMoreData_ANPAudioEvent, mTrack->user, &buffer);
    }

    if (buffer.size == 0) {
      LOG("%p - kMoreData_ANPAudioEvent produced no data", mTrack);
      continue;
    }

    size_t wroteSoFar = 0;
    jint retval;
    do {
      retval = jenv->CallIntMethod(mTrack->output_unit,
                                   at.write,
                                   bytearray,
                                   wroteSoFar,
                                   buffer.size - wroteSoFar);
      if (retval < 0) {
        LOG("%p - Write failed %d", mTrack, retval);
        break;
      }

      wroteSoFar += retval;

    } while (wroteSoFar < buffer.size);
  }

  jenv->CallVoidMethod(mTrack->output_unit, at.release);

  jenv->DeleteGlobalRef(mTrack->output_unit);
  jenv->DeleteGlobalRef(mTrack->at_class);

  delete mTrack;

  jenv->ReleaseByteArrayElements(bytearray, byte, 0);

  return NS_OK;
}
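
/* Create a Java AudioTrack in MODE_STREAM on the STREAM_MUSIC stream. The
 * bufferSize of rate * channels bytes is roughly one second of 8-bit audio
 * (about half a second at 16 bits per sample). Playback does not begin until
 * anp_audio_start() is called. */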
|
ANPAudioTrack*
anp_audio_newTrack(uint32_t sampleRate,    // sampling rate in Hz
                   ANPSampleFormat format,
                   int channelCount,       // MONO=1, STEREO=2
                   ANPAudioCallbackProc proc,
                   void* user)
{
  ANPAudioTrack *s = new ANPAudioTrack();
  if (s == nullptr) {
    return nullptr;
  }

  JNIEnv *jenv = GetJNIForThread();

  s->at_class = init_jni_bindings(jenv);
  s->rate = sampleRate;
  s->channels = channelCount;
  s->bufferSize = s->rate * s->channels;
  s->isStopped = true;
  s->keepGoing = false;
  s->user = user;
  s->proc = proc;
  s->format = format;

  int jformat;
  switch (format) {
  case kPCM16Bit_ANPSampleFormat:
    jformat = ENCODING_PCM_16BIT;
    break;
  case kPCM8Bit_ANPSampleFormat:
    jformat = ENCODING_PCM_8BIT;
    break;
  default:
    LOG("Unknown audio format; defaulting to 16-bit PCM.");
    jformat = ENCODING_PCM_16BIT;
    break;
  }

  int jChannels;
  switch (channelCount) {
  case 1:
    jChannels = CHANNEL_OUT_MONO;
    break;
  case 2:
    jChannels = CHANNEL_OUT_STEREO;
    break;
  default:
    LOG("Unknown channel count; defaulting to mono.");
    jChannels = CHANNEL_OUT_MONO;
    break;
  }

  mozilla::AutoLocalJNIFrame autoFrame(jenv);

  jobject obj = jenv->NewObject(s->at_class,
                                at.constructor,
                                STREAM_MUSIC,
                                s->rate,
                                jChannels,
                                jformat,
                                s->bufferSize,
                                MODE_STREAM);

  if (autoFrame.CheckForException() || obj == nullptr) {
    jenv->DeleteGlobalRef(s->at_class);
    // s was allocated with new, so delete it rather than free() it.
    delete s;
    return nullptr;
  }

  jint state = jenv->CallIntMethod(obj, at.getstate);

  if (autoFrame.CheckForException() || state == STATE_UNINITIALIZED) {
    jenv->DeleteGlobalRef(s->at_class);
    delete s;
    return nullptr;
  }

  s->output_unit = jenv->NewGlobalRef(obj);
  return s;
}
|
void
anp_audio_deleteTrack(ANPAudioTrack* s)
{
  if (s == nullptr) {
    return;
  }

  mozilla::MutexAutoLock lock(s->lock);
  s->keepGoing = false;

  // deallocation happens in the AudioThread. There is a
  // potential leak if anp_audio_start is never called, but
  // we do not see that from flash.
}
|
void
anp_audio_start(ANPAudioTrack* s)
{
  if (s == nullptr || s->output_unit == nullptr) {
    return;
  }

  if (s->keepGoing) {
    // we are already playing. Ignore.
    return;
  }

  JNIEnv *jenv = GetJNIForThread();

  mozilla::AutoLocalJNIFrame autoFrame(jenv, 0);
  jenv->CallVoidMethod(s->output_unit, at.play);

  if (autoFrame.CheckForException()) {
    // Drop both global refs created in anp_audio_newTrack; the audio thread is
    // never spawned on this path, so nothing else will release them. s was
    // allocated with new, so delete it rather than free() it.
    jenv->DeleteGlobalRef(s->output_unit);
    jenv->DeleteGlobalRef(s->at_class);
    delete s;
    return;
  }

  s->isStopped = false;
  s->keepGoing = true;

  // AudioRunnable now owns the ANPAudioTrack
  nsRefPtr<AudioRunnable> runnable = new AudioRunnable(s);

  nsCOMPtr<nsIThread> thread;
  NS_NewThread(getter_AddRefs(thread), runnable);
}
|
void
anp_audio_pause(ANPAudioTrack* s)
{
  if (s == nullptr || s->output_unit == nullptr) {
    return;
  }

  JNIEnv *jenv = GetJNIForThread();

  mozilla::AutoLocalJNIFrame autoFrame(jenv, 0);
  jenv->CallVoidMethod(s->output_unit, at.pause);
}
|
void
anp_audio_stop(ANPAudioTrack* s)
{
  if (s == nullptr || s->output_unit == nullptr) {
    return;
  }

  s->isStopped = true;
  JNIEnv *jenv = GetJNIForThread();

  mozilla::AutoLocalJNIFrame autoFrame(jenv, 0);
  jenv->CallVoidMethod(s->output_unit, at.stop);
}
|
bool
anp_audio_isStopped(ANPAudioTrack* s)
{
  return s->isStopped;
}
|
uint32_t
anp_audio_trackLatency(ANPAudioTrack* s) {
  // Hardcode an estimate of the system's audio latency. Flash hardcodes
  // similar latency estimates for pre-Honeycomb devices that do not support
  // ANPAudioTrackInterfaceV1's trackLatency(). The Android stock browser
  // calls android::AudioTrack::latency(), an internal Android API that is
  // not available in the public NDK:
  // https://github.com/android/platform_external_webkit/commit/49bf866973cb3b2a6c74c0eab864e9562e4cbab1
  return 100; // milliseconds
}
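
/* Populate the ANP audio interface vtables handed to the plugin. ASSIGN wires
 * each anp_audio_* entry point into the matching interface member, and the
 * inSize assertion guards against a size/version mismatch between the struct
 * the plugin requested and the one we know how to fill in. */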
|
void InitAudioTrackInterfaceV0(ANPAudioTrackInterfaceV0 *i) {
  _assert(i->inSize == sizeof(*i));
  ASSIGN(i, newTrack);
  ASSIGN(i, deleteTrack);
  ASSIGN(i, start);
  ASSIGN(i, pause);
  ASSIGN(i, stop);
  ASSIGN(i, isStopped);
}
|
void InitAudioTrackInterfaceV1(ANPAudioTrackInterfaceV1 *i) {
  _assert(i->inSize == sizeof(*i));
  ASSIGN(i, newTrack);
  ASSIGN(i, deleteTrack);
  ASSIGN(i, start);
  ASSIGN(i, pause);
  ASSIGN(i, stop);
  ASSIGN(i, isStopped);
  ASSIGN(i, trackLatency);
}
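
/* Illustrative sketch of how a plugin is expected to drive this interface,
 * kept under "#if 0" so it is never compiled. The ANPAudioCallbackProc
 * signature (event, user, buffer) is assumed from the ANP plugin headers;
 * everything else uses only names defined or consumed in this file. */
#if 0
static void ExampleAudioCallback(ANPAudioEvent event, void* user,
                                 ANPAudioBuffer* buffer)
{
  if (event != kMoreData_ANPAudioEvent) {
    return;
  }
  // Fill buffer->bufferData with up to buffer->size bytes of PCM and set
  // buffer->size to the number of bytes actually produced; a size of 0 makes
  // AudioRunnable::Run skip the write for that iteration.
  char* out = static_cast<char*>(buffer->bufferData);
  for (size_t i = 0; i < buffer->size; i++) {
    out[i] = 0; // silence
  }
}

static void ExampleUsage(ANPAudioTrackInterfaceV0* audio)
{
  // 44.1 kHz, 16-bit stereo; ExampleAudioCallback supplies the samples.
  ANPAudioTrack* track = audio->newTrack(44100, kPCM16Bit_ANPSampleFormat, 2,
                                         ExampleAudioCallback, nullptr);
  if (!track) {
    return;
  }
  audio->start(track);       // spawns the "Android Audio" thread
  // ... playback runs for a while ...
  audio->deleteTrack(track); // the audio thread frees the track on shutdown
}
#endif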