Thu, 22 Jan 2015 13:21:57 +0100
Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6
1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
2 /* This Source Code Form is subject to the terms of the Mozilla Public
3 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
4 * You can obtain one at http://mozilla.org/MPL/2.0/. */
6 #include "VP8TrackEncoder.h"
7 #include "vpx/vp8cx.h"
8 #include "vpx/vpx_encoder.h"
9 #include "VideoUtils.h"
10 #include "prsystem.h"
11 #include "WebMWriter.h"
12 #include "libyuv.h"
14 namespace mozilla {
#ifdef PR_LOGGING
PRLogModuleInfo* gVP8TrackEncoderLog;
// Debug logging macro; forwards to NSPR logging. Compiled out entirely when
// PR_LOGGING is disabled so call sites cost nothing in release builds.
#define VP8LOG(msg, ...) PR_LOG(gVP8TrackEncoderLog, PR_LOG_DEBUG, \
                                (msg, ##__VA_ARGS__))
#else
#define VP8LOG(msg, ...)
#endif

// Target bitrate handed to libvpx (rc_target_bitrate is in kbit/s).
#define DEFAULT_BITRATE 2500 // in kbit/s
// Fixed output frame rate of the encoded stream; also bounds kf_max_dist.
#define DEFAULT_ENCODE_FRAMERATE 30

using namespace mozilla::layers;
// Allocates the libvpx codec context and image wrapper; the codec itself is
// not initialized until Init() runs (mInitialized stays false until then).
VP8TrackEncoder::VP8TrackEncoder()
  : VideoTrackEncoder()
  , mEncodedFrameDuration(0)
  , mEncodedTimestamp(0)
  , mRemainingTicks(0)
  , mVPXContext(new vpx_codec_ctx_t())
  , mVPXImageWrapper(new vpx_image_t())
{
  MOZ_COUNT_CTOR(VP8TrackEncoder);
#ifdef PR_LOGGING
  // Lazily create the shared log module the first time an encoder is built.
  if (!gVP8TrackEncoderLog) {
    gVP8TrackEncoderLog = PR_NewLogModule("VP8TrackEncoder");
  }
#endif
}
VP8TrackEncoder::~VP8TrackEncoder()
{
  // Only tear down the codec context if vpx_codec_enc_init() succeeded;
  // destroying a never-initialized context is invalid.
  if (mInitialized) {
    vpx_codec_destroy(mVPXContext);
  }

  // The wrapper was populated by vpx_img_wrap() in Init(); vpx_img_free()
  // releases the wrapper's bookkeeping (the plane data is owned elsewhere).
  if (mVPXImageWrapper) {
    vpx_img_free(mVPXImageWrapper);
  }
  MOZ_COUNT_DTOR(VP8TrackEncoder);
}
// Configures and initializes the libvpx VP8 encoder for the given frame and
// display geometry. Returns NS_ERROR_FAILURE on invalid arguments or libvpx
// failure; on success sets mInitialized and wakes threads waiting on the
// monitor (GetMetadata / GetEncodedTrack).
nsresult
VP8TrackEncoder::Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
                      int32_t aDisplayHeight,TrackRate aTrackRate)
{
  // Reject degenerate sizes and non-positive track rates up front.
  if (aWidth < 1 || aHeight < 1 || aDisplayWidth < 1 || aDisplayHeight < 1
      || aTrackRate <= 0) {
    return NS_ERROR_FAILURE;
  }

  ReentrantMonitorAutoEnter mon(mReentrantMonitor);

  mTrackRate = aTrackRate;
  mEncodedFrameRate = DEFAULT_ENCODE_FRAMERATE;
  // Duration of one output frame, in track ticks (integer division).
  mEncodedFrameDuration = mTrackRate / mEncodedFrameRate;
  mFrameWidth = aWidth;
  mFrameHeight = aHeight;
  mDisplayWidth = aDisplayWidth;
  mDisplayHeight = aDisplayHeight;

  // Encoder configuration structure.
  vpx_codec_enc_cfg_t config;
  memset(&config, 0, sizeof(vpx_codec_enc_cfg_t));
  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config, 0)) {
    return NS_ERROR_FAILURE;
  }

  // Creating a wrapper to the image - setting image data to NULL. Actual
  // pointer will be set in encode. Setting align to 1, as it is meaningless
  // (actual memory is not allocated).
  vpx_img_wrap(mVPXImageWrapper, IMG_FMT_I420,
               mFrameWidth, mFrameHeight, 1, nullptr);

  config.g_w = mFrameWidth;
  config.g_h = mFrameHeight;
  // TODO: Maybe we should have various aFrameRate bitrate pair for each devices?
  // or for different platform
  config.rc_target_bitrate = DEFAULT_BITRATE; // in kbit/s

  // Setting the time base of the codec: timestamps and durations passed to
  // vpx_codec_encode() are in units of 1/mTrackRate seconds.
  config.g_timebase.num = 1;
  config.g_timebase.den = mTrackRate;

  config.g_error_resilient = 0;

  config.g_lag_in_frames = 0; // 0- no frame lagging

  // Scale the encoder thread count with resolution and available cores.
  int32_t number_of_cores = PR_GetNumberOfProcessors();
  if (mFrameWidth * mFrameHeight > 1280 * 960 && number_of_cores >= 6) {
    config.g_threads = 3; // 3 threads for 1080p.
  } else if (mFrameWidth * mFrameHeight > 640 * 480 && number_of_cores >= 3) {
    config.g_threads = 2; // 2 threads for qHD/HD.
  } else {
    config.g_threads = 1; // 1 thread for VGA or less
  }

  // rate control settings
  config.rc_dropframe_thresh = 0;
  config.rc_end_usage = VPX_CBR;
  config.g_pass = VPX_RC_ONE_PASS;
  config.rc_resize_allowed = 1;
  config.rc_undershoot_pct = 100;
  config.rc_overshoot_pct = 15;
  config.rc_buf_initial_sz = 500;
  config.rc_buf_optimal_sz = 600;
  config.rc_buf_sz = 1000;

  config.kf_mode = VPX_KF_AUTO;
  // Ensure that we can output one I-frame per second.
  config.kf_max_dist = mEncodedFrameRate;

  vpx_codec_flags_t flags = 0;
  // Request one output packet per token partition (see GetEncodedPartitions).
  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
  if (vpx_codec_enc_init(mVPXContext, vpx_codec_vp8_cx(), &config, flags)) {
    return NS_ERROR_FAILURE;
  }

  vpx_codec_control(mVPXContext, VP8E_SET_STATIC_THRESHOLD, 1);
  vpx_codec_control(mVPXContext, VP8E_SET_CPUUSED, -6);
  vpx_codec_control(mVPXContext, VP8E_SET_TOKEN_PARTITIONS,
                    VP8_ONE_TOKENPARTITION);

  // Publish initialization and wake any thread blocked on the monitor.
  mInitialized = true;
  mon.NotifyAll();

  return NS_OK;
}
145 already_AddRefed<TrackMetadataBase>
146 VP8TrackEncoder::GetMetadata()
147 {
148 {
149 // Wait if mEncoder is not initialized.
150 ReentrantMonitorAutoEnter mon(mReentrantMonitor);
151 while (!mCanceled && !mInitialized) {
152 mon.Wait();
153 }
154 }
156 if (mCanceled || mEncodingComplete) {
157 return nullptr;
158 }
160 nsRefPtr<VP8Metadata> meta = new VP8Metadata();
161 meta->mWidth = mFrameWidth;
162 meta->mHeight = mFrameHeight;
163 meta->mDisplayWidth = mDisplayWidth;
164 meta->mDisplayHeight = mDisplayHeight;
165 meta->mEncodedFrameRate = mEncodedFrameRate;
167 return meta.forget();
168 }
170 nsresult
171 VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData)
172 {
173 vpx_codec_iter_t iter = nullptr;
174 EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
175 nsTArray<uint8_t> frameData;
176 nsresult rv;
177 const vpx_codec_cx_pkt_t *pkt = nullptr;
178 while ((pkt = vpx_codec_get_cx_data(mVPXContext, &iter)) != nullptr) {
179 switch (pkt->kind) {
180 case VPX_CODEC_CX_FRAME_PKT: {
181 // Copy the encoded data from libvpx to frameData
182 frameData.AppendElements((uint8_t*)pkt->data.frame.buf,
183 pkt->data.frame.sz);
184 break;
185 }
186 default: {
187 break;
188 }
189 }
190 // End of frame
191 if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
192 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
193 frameType = EncodedFrame::VP8_I_FRAME;
194 }
195 break;
196 }
197 }
199 if (!frameData.IsEmpty() &&
200 (pkt->data.frame.pts == mEncodedTimestamp)) {
201 // Copy the encoded data to aData.
202 EncodedFrame* videoData = new EncodedFrame();
203 videoData->SetFrameType(frameType);
204 // Convert the timestamp and duration to Usecs.
205 CheckedInt64 timestamp = FramesToUsecs(mEncodedTimestamp, mTrackRate);
206 if (timestamp.isValid()) {
207 videoData->SetTimeStamp(
208 (uint64_t)FramesToUsecs(mEncodedTimestamp, mTrackRate).value());
209 }
210 CheckedInt64 duration = FramesToUsecs(pkt->data.frame.duration, mTrackRate);
211 if (duration.isValid()) {
212 videoData->SetDuration(
213 (uint64_t)FramesToUsecs(pkt->data.frame.duration, mTrackRate).value());
214 }
215 rv = videoData->SwapInFrameData(frameData);
216 NS_ENSURE_SUCCESS(rv, rv);
217 VP8LOG("GetEncodedPartitions TimeStamp %lld Duration %lld\n",
218 videoData->GetTimeStamp(), videoData->GetDuration());
219 VP8LOG("frameType %d\n", videoData->GetFrameType());
220 aData.AppendEncodedFrame(videoData);
221 }
223 return NS_OK;
224 }
226 void VP8TrackEncoder::PrepareMutedFrame()
227 {
228 if (mMuteFrame.IsEmpty()) {
229 CreateMutedFrame(&mMuteFrame);
230 }
232 uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
233 uint32_t halfWidth = (mFrameWidth + 1) / 2;
234 uint32_t halfHeight = (mFrameHeight + 1) / 2;
235 uint32_t uvPlaneSize = halfWidth * halfHeight;
237 MOZ_ASSERT(mMuteFrame.Length() >= (yPlaneSize + uvPlaneSize * 2));
238 uint8_t *y = mMuteFrame.Elements();
239 uint8_t *cb = mMuteFrame.Elements() + yPlaneSize;
240 uint8_t *cr = mMuteFrame.Elements() + yPlaneSize + uvPlaneSize;
242 mVPXImageWrapper->planes[PLANE_Y] = y;
243 mVPXImageWrapper->planes[PLANE_U] = cb;
244 mVPXImageWrapper->planes[PLANE_V] = cr;
245 mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
246 mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
247 mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
248 }
250 static bool isYUV420(const PlanarYCbCrImage::Data *aData)
251 {
252 if (aData->mYSize == aData->mCbCrSize * 2) {
253 return true;
254 }
255 return false;
256 }
258 static bool isYUV422(const PlanarYCbCrImage::Data *aData)
259 {
260 if ((aData->mYSize.width == aData->mCbCrSize.width * 2) &&
261 (aData->mYSize.height == aData->mCbCrSize.height)) {
262 return true;
263 }
264 return false;
265 }
267 static bool isYUV444(const PlanarYCbCrImage::Data *aData)
268 {
269 if (aData->mYSize == aData->mCbCrSize) {
270 return true;
271 }
272 return false;
273 }
// Points mVPXImageWrapper at I420 pixel data for aChunk. Muted/null chunks
// use the cached black frame; planar 4:2:0 input is referenced in place;
// NV12/NV21/4:4:4/4:2:2 input is converted into the mI420Frame scratch
// buffer via libyuv. Returns NS_ERROR_FAILURE for non-YCbCr images and
// NS_ERROR_NOT_IMPLEMENTED for unrecognized planar layouts.
nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
{
  if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
    PrepareMutedFrame();
  } else {
    Image* img = aChunk.mFrame.GetImage();
    ImageFormat format = img->GetFormat();
    if (format != ImageFormat::PLANAR_YCBCR) {
      VP8LOG("Unsupported video format\n");
      return NS_ERROR_FAILURE;
    }

    // Cast away constness b/c some of the accessors are non-const
    PlanarYCbCrImage* yuv =
      const_cast<PlanarYCbCrImage *>(static_cast<const PlanarYCbCrImage *>(img));
    // Big-time assumption here that this is all contiguous data coming
    // from getUserMedia or other sources.
    MOZ_ASSERT(yuv);
    if (!yuv->IsValid()) {
      NS_WARNING("PlanarYCbCrImage is not valid");
      return NS_ERROR_FAILURE;
    }
    const PlanarYCbCrImage::Data *data = yuv->GetData();

    if (isYUV420(data) && !data->mCbSkip) { // 420 planar
      // Already the layout libvpx expects: reference the source planes
      // directly, no copy.
      mVPXImageWrapper->planes[PLANE_Y] = data->mYChannel;
      mVPXImageWrapper->planes[PLANE_U] = data->mCbChannel;
      mVPXImageWrapper->planes[PLANE_V] = data->mCrChannel;
      mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
      mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
      mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
    } else {
      // Other layouts are converted into mI420Frame first.
      // NOTE(review): the scratch buffer is sized from mFrameWidth/Height and
      // allocated only once, so this assumes every chunk matches the Init()
      // dimensions — confirm against callers.
      uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
      uint32_t halfWidth = (mFrameWidth + 1) / 2;
      uint32_t halfHeight = (mFrameHeight + 1) / 2;
      uint32_t uvPlaneSize = halfWidth * halfHeight;
      if (mI420Frame.IsEmpty()) {
        mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
      }

      MOZ_ASSERT(mI420Frame.Length() >= (yPlaneSize + uvPlaneSize * 2));
      uint8_t *y = mI420Frame.Elements();
      uint8_t *cb = mI420Frame.Elements() + yPlaneSize;
      uint8_t *cr = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;

      if (isYUV420(data) && data->mCbSkip) {
        // If mCbSkip is set, we assume it's nv12 or nv21 (interleaved
        // chroma); the channel pointer order distinguishes the two.
        if (data->mCbChannel < data->mCrChannel) { // nv12
          libyuv::NV12ToI420(data->mYChannel, data->mYStride,
                             data->mCbChannel, data->mCbCrStride,
                             y, mFrameWidth,
                             cb, halfWidth,
                             cr, halfWidth,
                             mFrameWidth, mFrameHeight);
        } else { // nv21
          libyuv::NV21ToI420(data->mYChannel, data->mYStride,
                             data->mCrChannel, data->mCbCrStride,
                             y, mFrameWidth,
                             cb, halfWidth,
                             cr, halfWidth,
                             mFrameWidth, mFrameHeight);
        }
      } else if (isYUV444(data) && !data->mCbSkip) {
        libyuv::I444ToI420(data->mYChannel, data->mYStride,
                           data->mCbChannel, data->mCbCrStride,
                           data->mCrChannel, data->mCbCrStride,
                           y, mFrameWidth,
                           cb, halfWidth,
                           cr, halfWidth,
                           mFrameWidth, mFrameHeight);
      } else if (isYUV422(data) && !data->mCbSkip) {
        libyuv::I422ToI420(data->mYChannel, data->mYStride,
                           data->mCbChannel, data->mCbCrStride,
                           data->mCrChannel, data->mCbCrStride,
                           y, mFrameWidth,
                           cb, halfWidth,
                           cr, halfWidth,
                           mFrameWidth, mFrameHeight);
      } else {
        VP8LOG("Unsupported planar format\n");
        return NS_ERROR_NOT_IMPLEMENTED;
      }

      mVPXImageWrapper->planes[PLANE_Y] = y;
      mVPXImageWrapper->planes[PLANE_U] = cb;
      mVPXImageWrapper->planes[PLANE_V] = cr;
      mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
      mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
      mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
    }
  }
  return NS_OK;
}
// These two define values used in GetNextEncodeOperation to determine the
// EncodeOperation for the next target frame: the ratio of wall-clock encode
// time to the duration of media processed so far.
#define I_FRAME_RATIO (0.5)
#define SKIP_FRAME_RATIO (0.75)

/**
 * Compares the elapsed time from the beginning of GetEncodedTrack and
 * the processed frame duration in mSourceSegment
 * in order to set the nextEncodeOperation for next target frame.
 */
VP8TrackEncoder::EncodeOperation
VP8TrackEncoder::GetNextEncodeOperation(TimeDuration aTimeElapsed,
                                        TrackTicks aProcessedDuration)
{
  // NOTE(review): .value() is taken without an isValid() check; this assumes
  // the tick-to-usec conversion cannot overflow here — confirm.
  int64_t durationInUsec =
    FramesToUsecs(aProcessedDuration + mEncodedFrameDuration,
                  mTrackRate).value();
  if (aTimeElapsed.ToMicroseconds() > (durationInUsec * SKIP_FRAME_RATIO)) {
    // The encoder is too slow.
    // We should skip next frame to consume the mSourceSegment.
    return SKIP_FRAME;
  } else if (aTimeElapsed.ToMicroseconds() > (durationInUsec * I_FRAME_RATIO)) {
    // The encoder is a little slow.
    // We force the encoder to encode an I-frame to accelerate.
    return ENCODE_I_FRAME;
  } else {
    return ENCODE_NORMAL_FRAME;
  }
}
399 TrackTicks
400 VP8TrackEncoder::CalculateRemainingTicks(TrackTicks aDurationCopied,
401 TrackTicks aEncodedDuration)
402 {
403 return mRemainingTicks + aEncodedDuration - aDurationCopied;
404 }
406 // Try to extend the encodedDuration as long as possible if the target frame
407 // has a long duration.
408 TrackTicks
409 VP8TrackEncoder::CalculateEncodedDuration(TrackTicks aDurationCopied)
410 {
411 TrackTicks temp64 = aDurationCopied;
412 TrackTicks encodedDuration = mEncodedFrameDuration;
413 temp64 -= mRemainingTicks;
414 while (temp64 > mEncodedFrameDuration) {
415 temp64 -= mEncodedFrameDuration;
416 encodedDuration += mEncodedFrameDuration;
417 }
418 return encodedDuration;
419 }
/**
 * Encoding flow in GetEncodedTrack():
 * 1: Check the mInitialized state and the packet duration.
 * 2: Move the data from mRawSegment to mSourceSegment.
 * 3: Encode the video chunks in mSourceSegment in a for-loop.
 * 3.1: Pick the video chunk by mRemainingTicks.
 * 3.2: Calculate the encoding duration for the parameter of vpx_codec_encode().
 *      The encoding duration is a multiple of mEncodedFrameDuration.
 * 3.3: Setup the video chunk to mVPXImageWrapper by PrepareRawFrame().
 * 3.4: Send frame into vp8 encoder by vpx_codec_encode().
 * 3.5: Get the output frame from encoder by calling GetEncodedPartitions().
 * 3.6: Calculate the mRemainingTicks for next target frame.
 * 3.7: Set the nextEncodeOperation for the next target frame.
 *      There is a heuristic: if the frame duration we have processed in
 *      mSourceSegment is 100ms, it means that we can't spend more than 100ms
 *      encoding it.
 * 4. Remove the encoded chunks in mSourceSegment after the for-loop.
 *
 * Ex1: Input frame rate is 100 => input frame duration is 10ms for each.
 *      mEncodedFrameRate is 30 => output frame duration is 33ms.
 *      In this case, the frame duration in mSourceSegment will be:
 *      1st : 0~10ms
 *      2nd : 10~20ms
 *      3rd : 20~30ms
 *      4th : 30~40ms
 *      ...
 *      The VP8 encoder will take the 1st and 4th frames to encode. At beginning
 *      mRemainingTicks is 0 for 1st frame, then the mRemainingTicks is set
 *      to 23 to pick the 4th frame. (mEncodedFrameDuration - 1st frame duration)
 *
 * Ex2: Input frame rate is 25 => frame duration is 40ms for each.
 *      mEncodedFrameRate is 30 => output frame duration is 33ms.
 *      In this case, the frame duration in mSourceSegment will be:
 *      1st : 0~40ms
 *      2nd : 40~80ms
 *      3rd : 80~120ms
 *      4th : 120~160ms
 *      ...
 *      Because the input frame duration is 40ms, larger than 33ms, the first
 *      encoded frame duration will be 66ms by calling CalculateEncodedDuration.
 *      And the mRemainingTicks will be set to 26
 *      (CalculateRemainingTicks 0+66-40) in order to pick the next frame(2nd)
 *      in mSourceSegment.
 */
nsresult
VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData)
{
  {
    // Move all the samples from mRawSegment to mSourceSegment. We only hold
    // the monitor in this block.
    ReentrantMonitorAutoEnter mon(mReentrantMonitor);
    // Wait if mEncoder is not initialized, or when not enough raw data, but is
    // not the end of stream nor is being canceled.
    while (!mCanceled && (!mInitialized ||
           (mRawSegment.GetDuration() + mSourceSegment.GetDuration() <
            mEncodedFrameDuration && !mEndOfStream))) {
      mon.Wait();
    }
    if (mCanceled || mEncodingComplete) {
      return NS_ERROR_FAILURE;
    }
    mSourceSegment.AppendFrom(&mRawSegment);
  }

  VideoSegment::ChunkIterator iter(mSourceSegment);
  TrackTicks durationCopied = 0;
  TrackTicks totalProcessedDuration = 0;
  // Wall-clock reference used by GetNextEncodeOperation() to detect when the
  // encoder is falling behind real time.
  TimeStamp timebase = TimeStamp::Now();
  EncodeOperation nextEncodeOperation = ENCODE_NORMAL_FRAME;

  for (; !iter.IsEnded(); iter.Next()) {
    VideoChunk &chunk = *iter;
    // Accumulate chunk's duration to durationCopied until it reaches
    // mRemainingTicks.
    durationCopied += chunk.GetDuration();
    MOZ_ASSERT(mRemainingTicks <= mEncodedFrameDuration);
    VP8LOG("durationCopied %lld mRemainingTicks %lld\n",
           durationCopied, mRemainingTicks);
    if (durationCopied >= mRemainingTicks) {
      VP8LOG("nextEncodeOperation is %d\n",nextEncodeOperation);
      // Calculate encodedDuration for this target frame.
      TrackTicks encodedDuration = CalculateEncodedDuration(durationCopied);

      // Encode frame.
      if (nextEncodeOperation != SKIP_FRAME) {
        nsresult rv = PrepareRawFrame(chunk);
        NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);

        // Encode the data with VP8 encoder; force a keyframe when the
        // previous iteration decided the encoder was running slow.
        int flags = (nextEncodeOperation == ENCODE_NORMAL_FRAME) ?
                    0 : VPX_EFLAG_FORCE_KF;
        if (vpx_codec_encode(mVPXContext, mVPXImageWrapper, mEncodedTimestamp,
                             (unsigned long)encodedDuration, flags,
                             VPX_DL_REALTIME)) {
          return NS_ERROR_FAILURE;
        }
        // Get the encoded data from VP8 encoder.
        GetEncodedPartitions(aData);
      } else {
        // SKIP_FRAME
        // Extend the duration of the last encoded data in aData
        // because this frame will be skipped.
        nsRefPtr<EncodedFrame> last = nullptr;
        last = aData.GetEncodedFrames().LastElement();
        if (last) {
          last->SetDuration(last->GetDuration() + encodedDuration);
        }
      }
      // Move forward the mEncodedTimestamp.
      mEncodedTimestamp += encodedDuration;
      totalProcessedDuration += durationCopied;
      // Calculate mRemainingTicks for next target frame.
      mRemainingTicks = CalculateRemainingTicks(durationCopied,
                                                encodedDuration);

      // Check that the remaining data is enough for the next target frame.
      if (mSourceSegment.GetDuration() - totalProcessedDuration
          >= mEncodedFrameDuration) {
        TimeDuration elapsedTime = TimeStamp::Now() - timebase;
        nextEncodeOperation = GetNextEncodeOperation(elapsedTime,
                                                     totalProcessedDuration);
        // Reset durationCopied for next iteration.
        durationCopied = 0;
      } else {
        // Processing done; there is not enough data left for the next
        // iteration, break the for-loop.
        break;
      }
    }
  }
  // Remove the chunks we have processed.
  mSourceSegment.RemoveLeading(totalProcessedDuration);
  VP8LOG("RemoveLeading %lld\n",totalProcessedDuration);

  // End of stream: flush the remaining frames out of the encoder by passing
  // a null image.
  if (mEndOfStream) {
    VP8LOG("mEndOfStream is true\n");
    mEncodingComplete = true;
    if (vpx_codec_encode(mVPXContext, nullptr, mEncodedTimestamp,
                         mEncodedFrameDuration, 0, VPX_DL_REALTIME)) {
      return NS_ERROR_FAILURE;
    }
    GetEncodedPartitions(aData);
  }

  return NS_OK ;
}
569 } // namespace mozilla