content/media/encoder/TrackEncoder.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset    0:6474c204b198
permissions  -rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purpose.

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "TrackEncoder.h"
#include "AudioChannelFormat.h"
#include "MediaStreamGraph.h"
#include "prlog.h"
#include "VideoUtils.h"
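
// The LOG() macro below produces output only on Gonk (B2G) builds, where it
// forwards to the Android logging facility; on all other platforms it
// expands to nothing.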
#undef LOG
#ifdef MOZ_WIDGET_GONK
#include <android/log.h>
#define LOG(args...) __android_log_print(ANDROID_LOG_INFO, "MediaEncoder", ## args);
#else
#define LOG(args, ...)
#endif

namespace mozilla {

#ifdef PR_LOGGING
PRLogModuleInfo* gTrackEncoderLog;
#define TRACK_LOG(type, msg) PR_LOG(gTrackEncoderLog, type, msg)
#else
#define TRACK_LOG(type, msg)
#endif
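
// Fallback parameters used by the NotifyEndOfStream() implementations below
// when a track ends before any non-null chunk has initialized the encoder.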
static const int DEFAULT_CHANNELS = 1;
static const int DEFAULT_SAMPLING_RATE = 16000;
static const int DEFAULT_FRAME_WIDTH = 640;
static const int DEFAULT_FRAME_HEIGHT = 480;
static const int DEFAULT_TRACK_RATE = USECS_PER_S;

TrackEncoder::TrackEncoder()
  : mReentrantMonitor("media.TrackEncoder")
  , mEncodingComplete(false)
  , mEosSetInEncoder(false)
  , mInitialized(false)
  , mEndOfStream(false)
  , mCanceled(false)
#ifdef PR_LOGGING
  , mAudioInitCounter(0)
  , mVideoInitCounter(0)
#endif
{
#ifdef PR_LOGGING
  if (!gTrackEncoderLog) {
    gTrackEncoderLog = PR_NewLogModule("TrackEncoder");
  }
#endif
}
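
// MediaStreamGraph delivers newly queued media for the encoded audio track
// through this listener callback. The first non-null chunk determines the
// channel count and triggers Init(); each segment is then buffered via
// AppendAudioSegment() until TRACK_EVENT_ENDED arrives.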
void
AudioTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                            TrackID aID,
                                            TrackRate aTrackRate,
                                            TrackTicks aTrackOffset,
                                            uint32_t aTrackEvents,
                                            const MediaSegment& aQueuedMedia)
{
  if (mCanceled) {
    return;
  }

  const AudioSegment& audio = static_cast<const AudioSegment&>(aQueuedMedia);

  // Check and initialize parameters for codec encoder.
  if (!mInitialized) {
#ifdef PR_LOGGING
    mAudioInitCounter++;
    TRACK_LOG(PR_LOG_DEBUG, ("Init the audio encoder %d times", mAudioInitCounter));
#endif
    AudioSegment::ChunkIterator iter(const_cast<AudioSegment&>(audio));
    while (!iter.IsEnded()) {
      AudioChunk chunk = *iter;

      // The number of channels is determined by the first non-null chunk, and
      // thus the audio encoder is initialized at this time.
      if (!chunk.IsNull()) {
        nsresult rv = Init(chunk.mChannelData.Length(), aTrackRate);
        if (NS_FAILED(rv)) {
          LOG("[AudioTrackEncoder]: Fail to initialize the encoder!");
          NotifyCancel();
        }
        break;
      }

      iter.Next();
    }
  }

  // Append and consume this raw segment.
  AppendAudioSegment(audio);

  // The stream has stopped and reached the end of track.
  if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
    LOG("[AudioTrackEncoder]: Receive TRACK_EVENT_ENDED .");
    NotifyEndOfStream();
  }
}

void
AudioTrackEncoder::NotifyEndOfStream()
{
  // If the source audio track is completely silent till the end of encoding,
  // initialize the encoder with the default channel count and sampling rate.
  if (!mCanceled && !mInitialized) {
    Init(DEFAULT_CHANNELS, DEFAULT_SAMPLING_RATE);
  }

  ReentrantMonitorAutoEnter mon(mReentrantMonitor);
  mEndOfStream = true;
  mReentrantMonitor.NotifyAll();
}
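
// Buffers the incoming segment under the monitor; once at least one packet's
// worth of samples (GetPacketDuration()) has accumulated, NotifyAll() wakes
// any thread waiting on mReentrantMonitor to pull raw data for encoding.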
nsresult
AudioTrackEncoder::AppendAudioSegment(const AudioSegment& aSegment)
{
  ReentrantMonitorAutoEnter mon(mReentrantMonitor);

  AudioSegment::ChunkIterator iter(const_cast<AudioSegment&>(aSegment));
  while (!iter.IsEnded()) {
    AudioChunk chunk = *iter;
    // Append and consume both non-null and null chunks.
    mRawSegment.AppendAndConsumeChunk(&chunk);
    iter.Next();
  }

  if (mRawSegment.GetDuration() >= GetPacketDuration()) {
    mReentrantMonitor.NotifyAll();
  }

  return NS_OK;
}
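
// A block of silence large enough to stand in for a missing channel during
// up-mixing: MAX_AUDIO_SAMPLE_SIZE bytes per sample times
// AUDIO_PROCESSING_FRAMES samples (640 frames is just over 10 ms at 48 kHz).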
static const int AUDIO_PROCESSING_FRAMES = 640; /* > 10ms of 48KHz audio */
static const uint8_t gZeroChannel[MAX_AUDIO_SAMPLE_SIZE*AUDIO_PROCESSING_FRAMES] = {0};
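
// Writes aDuration frames of aChunk into aOutput as interleaved samples with
// exactly aOutputChannels channels: the chunk is up-mixed (padding with
// gZeroChannel) or down-mixed as needed, then interleaved and converted to
// AudioDataValue.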
/*static*/
void
AudioTrackEncoder::InterleaveTrackData(AudioChunk& aChunk,
                                       int32_t aDuration,
                                       uint32_t aOutputChannels,
                                       AudioDataValue* aOutput)
{
  if (aChunk.mChannelData.Length() < aOutputChannels) {
    // Up-mix. This might make mChannelData have more channels than
    // aOutputChannels.
    AudioChannelsUpMix(&aChunk.mChannelData, aOutputChannels, gZeroChannel);
  }

  if (aChunk.mChannelData.Length() > aOutputChannels) {
    DownmixAndInterleave(aChunk.mChannelData, aChunk.mBufferFormat, aDuration,
                         aChunk.mVolume, aOutputChannels, aOutput);
  } else {
    InterleaveAndConvertBuffer(aChunk.mChannelData.Elements(),
                               aChunk.mBufferFormat, aDuration, aChunk.mVolume,
                               aOutputChannels, aOutput);
  }
}
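
// Converts interleaved audio back to planar layout: aInput holds frames as
// [ch0, ch1, ..., chN-1, ch0, ch1, ...] while aOutput receives aChannels
// contiguous planes of aDuration samples each, e.g. for stereo
// L R L R L R  ->  L L L R R R.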
/*static*/
void
AudioTrackEncoder::DeInterleaveTrackData(AudioDataValue* aInput,
                                         int32_t aDuration,
                                         int32_t aChannels,
                                         AudioDataValue* aOutput)
{
  for (int32_t i = 0; i < aChannels; ++i) {
    for (int32_t j = 0; j < aDuration; ++j) {
      aOutput[i * aDuration + j] = aInput[i + j * aChannels];
    }
  }
}
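
// Video counterpart of AudioTrackEncoder::NotifyQueuedTrackChanges(): the
// first non-null chunk supplies the frame and intrinsic (display) sizes used
// to Init() the encoder, and each segment is buffered via
// AppendVideoSegment() until TRACK_EVENT_ENDED arrives.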
void
VideoTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                            TrackID aID,
                                            TrackRate aTrackRate,
                                            TrackTicks aTrackOffset,
                                            uint32_t aTrackEvents,
                                            const MediaSegment& aQueuedMedia)
{
  if (mCanceled) {
    return;
  }

  const VideoSegment& video = static_cast<const VideoSegment&>(aQueuedMedia);

  // Check and initialize parameters for codec encoder.
  if (!mInitialized) {
#ifdef PR_LOGGING
    mVideoInitCounter++;
    TRACK_LOG(PR_LOG_DEBUG, ("Init the video encoder %d times", mVideoInitCounter));
#endif
    VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(video));
    while (!iter.IsEnded()) {
      VideoChunk chunk = *iter;
      if (!chunk.IsNull()) {
        gfx::IntSize imgsize = chunk.mFrame.GetImage()->GetSize();
        gfxIntSize intrinsicSize = chunk.mFrame.GetIntrinsicSize();
        nsresult rv = Init(imgsize.width, imgsize.height,
                           intrinsicSize.width, intrinsicSize.height,
                           aTrackRate);
        if (NS_FAILED(rv)) {
          LOG("[VideoTrackEncoder]: Fail to initialize the encoder!");
          NotifyCancel();
        }
        break;
      }

      iter.Next();
    }
  }

  AppendVideoSegment(video);

  // The stream has stopped and reached the end of track.
  if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
    LOG("[VideoTrackEncoder]: Receive TRACK_EVENT_ENDED .");
    NotifyEndOfStream();
  }
}
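
// Unlike the audio path, which waits for a full packet, any non-zero buffered
// duration wakes threads waiting on mReentrantMonitor; each frame is appended
// together with its duration and intrinsic size.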
nsresult
VideoTrackEncoder::AppendVideoSegment(const VideoSegment& aSegment)
{
  ReentrantMonitorAutoEnter mon(mReentrantMonitor);

  // Append all video segments from MediaStreamGraph, including null and
  // non-null frames.
  VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(aSegment));
  while (!iter.IsEnded()) {
    VideoChunk chunk = *iter;
    nsRefPtr<layers::Image> image = chunk.mFrame.GetImage();
    mRawSegment.AppendFrame(image.forget(), chunk.GetDuration(),
                            chunk.mFrame.GetIntrinsicSize().ToIntSize());
    iter.Next();
  }

  if (mRawSegment.GetDuration() > 0) {
    mReentrantMonitor.NotifyAll();
  }

  return NS_OK;
}

void
VideoTrackEncoder::NotifyEndOfStream()
{
  // If source video track is muted till the end of encoding, initialize the
  // encoder with default frame width, frame height, and track rate.
  if (!mCanceled && !mInitialized) {
    Init(DEFAULT_FRAME_WIDTH, DEFAULT_FRAME_HEIGHT,
         DEFAULT_FRAME_WIDTH, DEFAULT_FRAME_HEIGHT, DEFAULT_TRACK_RATE);
  }

  ReentrantMonitorAutoEnter mon(mReentrantMonitor);
  mEndOfStream = true;
  mReentrantMonitor.NotifyAll();
}
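
// Produces one black frame in planar YUV 4:2:0 layout: a full-size luma (Y)
// plane filled with 0x10 (video black) followed by quarter-size Cb and Cr
// planes filled with 0x80 (neutral chroma), for a total of
// width * height * 3 / 2 bytes.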
void
VideoTrackEncoder::CreateMutedFrame(nsTArray<uint8_t>* aOutputBuffer)
{
  NS_ENSURE_TRUE_VOID(aOutputBuffer);

  // Supports YUV420 image format only.
  int yPlaneLen = mFrameWidth * mFrameHeight;
  int cbcrPlaneLen = yPlaneLen / 2;
  int frameLen = yPlaneLen + cbcrPlaneLen;

  aOutputBuffer->SetLength(frameLen);
  // Fill Y plane.
  memset(aOutputBuffer->Elements(), 0x10, yPlaneLen);
  // Fill Cb/Cr planes.
  memset(aOutputBuffer->Elements() + yPlaneLen, 0x80, cbcrPlaneLen);
}

}
