/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "base/basictypes.h"
#include "AndroidCaptureProvider.h"
#include "nsXULAppAPI.h"
#include "AndroidBridge.h"
#include "nsStreamUtils.h"
#include "nsThreadUtils.h"
#include "nsMemory.h"
#include "RawStructs.h"

// The maximum number of frames we keep in our queue. Don't live in the past.
#define MAX_FRAMES_QUEUED 10

using namespace mozilla::net;

NS_IMPL_ISUPPORTS(AndroidCameraInputStream, nsIInputStream, nsIAsyncInputStream)

AndroidCameraInputStream::AndroidCameraInputStream() :
  mWidth(0), mHeight(0), mCamera(0), mHeaderSent(false), mClosed(true), mFrameSize(0),
  mMonitor("AndroidCamera.Monitor")
{
  mAvailable = sizeof(RawVideoHeader);
  mFrameQueue = new nsDeque();
}

AndroidCameraInputStream::~AndroidCameraInputStream() {
  // clear the frame queue
  while (mFrameQueue->GetSize() > 0) {
    nsMemory::Free(mFrameQueue->PopFront());
  }
  delete mFrameQueue;
}

NS_IMETHODIMP
AndroidCameraInputStream::Init(nsACString& aContentType, nsCaptureParams* aParams)
{
  if (XRE_GetProcessType() != GeckoProcessType_Default)
    return NS_ERROR_NOT_IMPLEMENTED;

  mContentType = aContentType;
  mWidth = aParams->width;
  mHeight = aParams->height;
  mCamera = aParams->camera;

  CameraStreamImpl *impl = CameraStreamImpl::GetInstance(0);
  if (!impl)
    return NS_ERROR_OUT_OF_MEMORY;
  if (impl->Init(mContentType, mCamera, mWidth, mHeight, this)) {
    mWidth = impl->GetWidth();
    mHeight = impl->GetHeight();
    mClosed = false;
  }
  return NS_OK;
}

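// Called by CameraStreamImpl for each camera preview frame. The incoming
// buffer is semi-planar YUV: a full Y plane followed by an interleaved
// chroma plane in V/U order (NV21, the Android camera preview default).
// We repack it behind a RawPacketHeader as planar Y, U, V before queueing
// it; if the queue already holds MAX_FRAMES_QUEUED frames, the oldest one
// is dropped first.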
void AndroidCameraInputStream::ReceiveFrame(char* frame, uint32_t length) {
  {
    mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
    if (mFrameQueue->GetSize() > MAX_FRAMES_QUEUED) {
      // Drop the oldest frame rather than fall further behind the camera.
      nsMemory::Free(mFrameQueue->PopFront());
      mAvailable -= mFrameSize;
    }
  }

  mFrameSize = sizeof(RawPacketHeader) + length;

  char* fullFrame = (char*)nsMemory::Alloc(mFrameSize);

  if (!fullFrame)
    return;

  RawPacketHeader* header = (RawPacketHeader*) fullFrame;
  header->packetID = 0xFF;
  header->codecID = 0x595556; // "YUV"

  // We copy the Y plane and de-interleave the chroma samples into separate
  // U and V planes, all laid out right after the packet header.

  uint32_t yFrameSize = mWidth * mHeight;
  uint32_t uvFrameSize = yFrameSize / 4;

  memcpy(fullFrame + sizeof(RawPacketHeader), frame, yFrameSize);

  char* uFrame = fullFrame + sizeof(RawPacketHeader) + yFrameSize;
  char* vFrame = fullFrame + sizeof(RawPacketHeader) + yFrameSize + uvFrameSize;
  char* uvFrame = frame + yFrameSize; // interleaved V/U samples follow the Y plane
  for (uint32_t i = 0; i < uvFrameSize; i++) {
    uFrame[i] = uvFrame[2 * i + 1];
    vFrame[i] = uvFrame[2 * i];
  }

  {
    mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
    mAvailable += mFrameSize;
    mFrameQueue->Push((void*)fullFrame);
  }

  NotifyListeners();
}

NS_IMETHODIMP
AndroidCameraInputStream::Available(uint64_t *aAvailable)
{
  mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);

  *aAvailable = mAvailable;

  return NS_OK;
}

NS_IMETHODIMP AndroidCameraInputStream::IsNonBlocking(bool *aNonBlock) {
  *aNonBlock = true;
  return NS_OK;
}

NS_IMETHODIMP AndroidCameraInputStream::Read(char *aBuffer, uint32_t aCount, uint32_t *aRead) {
  return ReadSegments(NS_CopySegmentToBuffer, aBuffer, aCount, aRead);
}

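// ReadSegments produces the raw YUV container that RawReader consumes: a
// single RawVideoHeader describing the stream, followed by one
// RawPacketHeader-prefixed packet per queued frame. Only whole packets are
// handed to the writer; a frame that cannot be written in full is pushed
// back onto the queue for a later read.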
NS_IMETHODIMP AndroidCameraInputStream::ReadSegments(nsWriteSegmentFun aWriter, void *aClosure, uint32_t aCount, uint32_t *aRead) {
  *aRead = 0;

  nsresult rv;

  if (mAvailable == 0)
    return NS_BASE_STREAM_WOULD_BLOCK;

  if (aCount > mAvailable)
    aCount = mAvailable;

  if (!mHeaderSent) {
    CameraStreamImpl *impl = CameraStreamImpl::GetInstance(0);
    RawVideoHeader header;
    header.headerPacketID = 0;
    header.codecID = 0x595556; // "YUV"
    header.majorVersion = 0;
    header.minorVersion = 1;
    header.options = 1 | (1 << 1); // color, 4:2:2

    header.alphaChannelBpp = 0;
    header.lumaChannelBpp = 8;
    header.chromaChannelBpp = 4;
    header.colorspace = 1;

    header.frameWidth = mWidth;
    header.frameHeight = mHeight;
    header.aspectNumerator = 1;
    header.aspectDenominator = 1;

    header.framerateNumerator = impl->GetFps();
    header.framerateDenominator = 1;

    rv = aWriter(this, aClosure, (const char*)&header, 0, sizeof(RawVideoHeader), aRead);

    if (NS_FAILED(rv))
      return NS_OK;

    mHeaderSent = true;
    aCount -= sizeof(RawVideoHeader);
    mAvailable -= sizeof(RawVideoHeader);
  }

  {
    mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
    while ((mAvailable > 0) && (aCount >= mFrameSize)) {
      uint32_t readThisTime = 0;

      char* frame = (char*)mFrameQueue->PopFront();
      rv = aWriter(this, aClosure, (const char*)frame, *aRead, mFrameSize, &readThisTime);

      if (readThisTime != mFrameSize) {
        // Partial write: put the frame back so the next read can retry it.
        mFrameQueue->PushFront((void*)frame);
        return NS_OK;
      }

      // RawReader does a copy when calling VideoData::Create()
      nsMemory::Free(frame);

      if (NS_FAILED(rv))
        return NS_OK;

      aCount -= readThisTime;
      mAvailable -= readThisTime;
      *aRead += readThisTime;
    }
  }
  return NS_OK;
}

NS_IMETHODIMP AndroidCameraInputStream::Close() {
  return CloseWithStatus(NS_OK);
}

/**
 * must be called on the main (java) thread
 */
void AndroidCameraInputStream::doClose() {
  NS_ASSERTION(!mClosed, "Camera is already closed");

  CameraStreamImpl *impl = CameraStreamImpl::GetInstance(0);
  impl->Close();
  mClosed = true;
}

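// Fires the callback registered via AsyncWait once at least one full frame
// (beyond the stream header) is available. The callback is proxied to the
// requested event target when one was supplied.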
void AndroidCameraInputStream::NotifyListeners() {
  mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);

  if (mCallback && (mAvailable > sizeof(RawVideoHeader))) {
    nsCOMPtr<nsIInputStreamCallback> callback;
    if (mCallbackTarget) {
      callback = NS_NewInputStreamReadyEvent(mCallback, mCallbackTarget);
    } else {
      callback = mCallback;
    }

    NS_ASSERTION(callback, "Shouldn't fail to make the callback!");

    // Null the callback first because OnInputStreamReady may reenter AsyncWait
    mCallback = nullptr;
    mCallbackTarget = nullptr;

    callback->OnInputStreamReady(this);
  }
}

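// Only one pending callback is supported at a time, and wait flags are not
// implemented. The callback may fire immediately if data is already queued.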
NS_IMETHODIMP AndroidCameraInputStream::AsyncWait(nsIInputStreamCallback *aCallback, uint32_t aFlags, uint32_t aRequestedCount, nsIEventTarget *aTarget)
{
  if (aFlags != 0)
    return NS_ERROR_NOT_IMPLEMENTED;

  if (mCallback || mCallbackTarget)
    return NS_ERROR_UNEXPECTED;

  mCallbackTarget = aTarget;
  mCallback = aCallback;

  // What we are being asked for may be present already
  NotifyListeners();
  return NS_OK;
}

NS_IMETHODIMP AndroidCameraInputStream::CloseWithStatus(nsresult status)
{
  AndroidCameraInputStream::doClose();
  return NS_OK;
}

/**
 * AndroidCaptureProvider implementation
 */

NS_IMPL_ISUPPORTS0(AndroidCaptureProvider)

AndroidCaptureProvider* AndroidCaptureProvider::sInstance = nullptr;

AndroidCaptureProvider::AndroidCaptureProvider() {
}

AndroidCaptureProvider::~AndroidCaptureProvider() {
  AndroidCaptureProvider::sInstance = nullptr;
}

nsresult AndroidCaptureProvider::Init(nsACString& aContentType,
                                      nsCaptureParams* aParams,
                                      nsIInputStream** aStream) {

  NS_ENSURE_ARG_POINTER(aParams);

  NS_ASSERTION(aParams->frameLimit == 0 || aParams->timeLimit == 0,
               "Cannot set both a frame limit and a time limit!");

  nsRefPtr<AndroidCameraInputStream> stream;

  if (aContentType.EqualsLiteral("video/x-raw-yuv")) {
    stream = new AndroidCameraInputStream();
    if (stream) {
      nsresult rv = stream->Init(aContentType, aParams);
      if (NS_FAILED(rv))
        return rv;
    }
    else
      return NS_ERROR_OUT_OF_MEMORY;
  } else {
    NS_NOTREACHED("Should not have asked Android for this type!");
  }
  return CallQueryInterface(stream, aStream);
}

already_AddRefed<AndroidCaptureProvider> GetAndroidCaptureProvider() {
  if (!AndroidCaptureProvider::sInstance) {
    AndroidCaptureProvider::sInstance = new AndroidCaptureProvider();
  }
  nsRefPtr<AndroidCaptureProvider> ret = AndroidCaptureProvider::sInstance;
  return ret.forget();
}
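
// Illustrative usage sketch (not part of this file): a caller fills an
// nsCaptureParams and asks the provider for a raw YUV stream. The exact
// caller and the struct initialization shown here are assumptions beyond
// what this file defines; only width, height, camera, frameLimit and
// timeLimit are referenced above.
//
//   nsCaptureParams params = {};   // hypothetical zero-initialization
//   params.width = 640;
//   params.height = 480;
//   params.camera = 0;
//   nsCString type("video/x-raw-yuv");
//   nsCOMPtr<nsIInputStream> stream;
//   nsRefPtr<AndroidCaptureProvider> provider = GetAndroidCaptureProvider();
//   nsresult rv = provider->Init(type, &params, getter_AddRefs(stream));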