media/webrtc/signaling/src/media-conduit/VideoConduit.cpp


author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Thu, 15 Jan 2015 15:59:08 +0100
branch       TOR_BUG_9701
changeset    10:ac0c01689b40
permissions  -rw-r--r--

Implement a real Private Browsing Mode condition by changing the API/ABI.
This solves Tor bug #9701, complying with the disk-avoidance requirement documented in
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
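
The hunks of the patch are not reproduced in this file view. As a rough, illustrative sketch only (not the actual changeset), the disk-avoidance condition described above could gate the WebRTC trace-log setup in WebrtcVideoConduit::Init() (around source lines 240-253 below), which otherwise writes WebRTC.log to disk; the aPrivateBrowsing flag and its plumbing through VideoSessionConduit::Create() are assumptions made for this sketch:

    // Sketch only: skip all trace-file I/O when the conduit belongs to a
    // private browsing session (the aPrivateBrowsing flag and its plumbing
    // are hypothetical, not part of this listing).
    PRLogModuleInfo *logs = GetWebRTCLogInfo();
    if (!aPrivateBrowsing && !gWebrtcTraceLoggingOn && logs && logs->level > 0) {
      gWebrtcTraceLoggingOn = 1;
      const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
      if (!file) {
        file = "WebRTC.log";
      }
      mVideoEngine->SetTraceFilter(logs->level);
      mVideoEngine->SetTraceFile(file);  // never reached in Private Browsing Mode
    }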

     1 /* This Source Code Form is subject to the terms of the Mozilla Public
     2  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     3  * You can obtain one at http://mozilla.org/MPL/2.0/. */
     5 #include "CSFLog.h"
     6 #include "nspr.h"
     8 // For rtcp-fb constants
     9 #include "ccsdp.h"
    11 #include "VideoConduit.h"
    12 #include "AudioConduit.h"
    13 #include "nsThreadUtils.h"
    14 #include "LoadManager.h"
    15 #include "YuvStamper.h"
    16 #include "nsServiceManagerUtils.h"
    17 #include "nsIPrefService.h"
    18 #include "nsIPrefBranch.h"
    20 #include "webrtc/common_video/interface/native_handle.h"
    21 #include "webrtc/video_engine/include/vie_errors.h"
    23 #ifdef MOZ_WIDGET_ANDROID
    24 #include "AndroidJNIWrapper.h"
    25 #endif
    27 #include <algorithm>
    28 #include <math.h>
    30 namespace mozilla {
    32 static const char* logTag ="WebrtcVideoSessionConduit";
    34 // 32 bytes is what WebRTC CodecInst expects
    35 const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
    37 /**
    38  * Factory Method for VideoConduit
    39  */
    40 mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create(VideoSessionConduit *aOther)
    41 {
    42 #ifdef MOZILLA_INTERNAL_API
    43   // unit tests create their own "main thread"
    44   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
    45 #endif
    46   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
    48   WebrtcVideoConduit* obj = new WebrtcVideoConduit();
    49   if(obj->Init(static_cast<WebrtcVideoConduit*>(aOther)) != kMediaConduitNoError)
    50   {
    51     CSFLogError(logTag,  "%s VideoConduit Init Failed ", __FUNCTION__);
    52     delete obj;
    53     return nullptr;
    54   }
    55   CSFLogDebug(logTag,  "%s Successfully created VideoConduit ", __FUNCTION__);
    56   return obj;
    57 }
    59 WebrtcVideoConduit::~WebrtcVideoConduit()
    60 {
    61 #ifdef MOZILLA_INTERNAL_API
    62   // unit tests create their own "main thread"
    63   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
    64 #endif
    65   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
    67   for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
    68   {
    69     delete mRecvCodecList[i];
    70   }
    72   delete mCurSendCodecConfig;
    74   // The first one of a pair to be deleted shuts down media for both
    75   //Deal with External Capturer
    76   if(mPtrViECapture)
    77   {
    78     if (!mShutDown) {
    79       mPtrViECapture->DisconnectCaptureDevice(mCapId);
    80       mPtrViECapture->ReleaseCaptureDevice(mCapId);
    81       mPtrExtCapture = nullptr;
    82       if (mOtherDirection)
    83         mOtherDirection->mPtrExtCapture = nullptr;
    84     }
    85   }
    87   //Deal with External Renderer
    88   if(mPtrViERender)
    89   {
    90     if (!mShutDown) {
    91       if(mRenderer) {
    92         mPtrViERender->StopRender(mChannel);
    93       }
    94       mPtrViERender->RemoveRenderer(mChannel);
    95     }
    96   }
    98   //Deal with the transport
    99   if(mPtrViENetwork)
   100   {
   101     if (!mShutDown) {
   102       mPtrViENetwork->DeregisterSendTransport(mChannel);
   103     }
   104   }
   106   if(mPtrViEBase)
   107   {
   108     if (!mShutDown) {
   109       mPtrViEBase->StopSend(mChannel);
   110       mPtrViEBase->StopReceive(mChannel);
   111       SyncTo(nullptr);
   112       mPtrViEBase->DeleteChannel(mChannel);
   113     }
   114   }
   116   if (mOtherDirection)
   117   {
   118     // mOtherDirection owns these now!
   119     mOtherDirection->mOtherDirection = nullptr;
   120     // let the other side know we terminated the channel
   121     mOtherDirection->mShutDown = true;
   122     mVideoEngine = nullptr;
   123   } else {
   124     // We can't delete the VideoEngine until all these are released!
   125     // And we can't use a Scoped ptr, since the order is arbitrary
   126     mPtrViEBase = nullptr;
   127     mPtrViECapture = nullptr;
   128     mPtrViECodec = nullptr;
   129     mPtrViENetwork = nullptr;
   130     mPtrViERender = nullptr;
   131     mPtrRTP = nullptr;
   132     mPtrExtCodec = nullptr;
   134     // only one opener can call Delete.  Have it be the last to close.
   135     if(mVideoEngine)
   136     {
   137       webrtc::VideoEngine::Delete(mVideoEngine);
   138     }
   139   }
   140 }
   142 bool WebrtcVideoConduit::GetLocalSSRC(unsigned int* ssrc) {
   143   return !mPtrRTP->GetLocalSSRC(mChannel, *ssrc);
   144 }
   146 bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) {
   147   return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc);
   148 }
   150 bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
   151                                     int32_t* playoutBufferDelayMs,
   152                                     int32_t* avSyncOffsetMs) {
   153   return false;
   154 }
   156 bool WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
   157                                      unsigned int* cumulativeLost) {
   158   unsigned short fractionLost;
   159   unsigned extendedMax;
   160   int rttMs;
   161   // GetReceivedRTCPStatistics is a poorly named GetRTPStatistics variant
   162   return !mPtrRTP->GetReceivedRTCPStatistics(mChannel, fractionLost,
   163                                              *cumulativeLost,
   164                                              extendedMax,
   165                                              *jitterMs,
   166                                              rttMs);
   167 }
   169 bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
   170                                                uint32_t* jitterMs,
   171                                                uint32_t* packetsReceived,
   172                                                uint64_t* bytesReceived,
   173                                                uint32_t* cumulativeLost,
   174                                                int32_t* rttMs) {
   175   uint32_t ntpHigh, ntpLow;
   176   uint16_t fractionLost;
   177   bool result = !mPtrRTP->GetRemoteRTCPReceiverInfo(mChannel, ntpHigh, ntpLow,
   178                                                     *packetsReceived,
   179                                                     *bytesReceived,
   180                                                     jitterMs,
   181                                                     &fractionLost,
   182                                                     cumulativeLost,
   183                                                     rttMs);
   184   if (result) {
   185     *timestamp = NTPtoDOMHighResTimeStamp(ntpHigh, ntpLow);
   186   }
   187   return result;
   188 }
   190 bool WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
   191                                              unsigned int* packetsSent,
   192                                              uint64_t* bytesSent) {
   193   struct webrtc::SenderInfo senderInfo;
   194   bool result = !mPtrRTP->GetRemoteRTCPSenderInfo(mChannel, &senderInfo);
   195   if (result) {
   196     *timestamp = NTPtoDOMHighResTimeStamp(senderInfo.NTP_timestamp_high,
   197                                           senderInfo.NTP_timestamp_low);
   198     *packetsSent = senderInfo.sender_packet_count;
   199     *bytesSent = senderInfo.sender_octet_count;
   200   }
   201   return result;
   202 }
   204 /**
   205  * Performs initialization of the MANDATORY components of the Video Engine
   206  */
   207 MediaConduitErrorCode WebrtcVideoConduit::Init(WebrtcVideoConduit *other)
   208 {
   209   CSFLogDebug(logTag,  "%s this=%p other=%p", __FUNCTION__, this, other);
   211   if (other) {
   212     MOZ_ASSERT(!other->mOtherDirection);
   213     other->mOtherDirection = this;
   214     mOtherDirection = other;
   216     // only one can call ::Create()/GetVideoEngine()
   217     MOZ_ASSERT(other->mVideoEngine);
   218     mVideoEngine = other->mVideoEngine;
   219   } else {
   221 #ifdef MOZ_WIDGET_ANDROID
   222     jobject context = jsjni_GetGlobalContextRef();
   224     // get the JVM
   225     JavaVM *jvm = jsjni_GetVM();
   227     if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
   228       CSFLogError(logTag,  "%s: could not set Android objects", __FUNCTION__);
   229       return kMediaConduitSessionNotInited;
   230     }
   231 #endif
   233     // Per the WebRTC APIs, the function calls below return nullptr on failure
   234     if( !(mVideoEngine = webrtc::VideoEngine::Create()) )
   235     {
   236       CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
   237       return kMediaConduitSessionNotInited;
   238     }
   240     PRLogModuleInfo *logs = GetWebRTCLogInfo();
   241     if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
   242       // no need for a critical section or lock here
   243       gWebrtcTraceLoggingOn = 1;
   245       const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
   246       if (!file) {
   247         file = "WebRTC.log";
   248       }
   249       CSFLogDebug(logTag,  "%s Logging webrtc to %s level %d", __FUNCTION__,
   250                   file, logs->level);
   251       mVideoEngine->SetTraceFilter(logs->level);
   252       mVideoEngine->SetTraceFile(file);
   253     }
   254   }
   256   if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
   257   {
   258     CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__);
   259     return kMediaConduitSessionNotInited;
   260   }
   262   if( !(mPtrViECapture = ViECapture::GetInterface(mVideoEngine)))
   263   {
   264     CSFLogError(logTag, "%s Unable to get video capture interface", __FUNCTION__);
   265     return kMediaConduitSessionNotInited;
   266   }
   268   if( !(mPtrViECodec = ViECodec::GetInterface(mVideoEngine)))
   269   {
   270     CSFLogError(logTag, "%s Unable to get video codec interface ", __FUNCTION__);
   271     return kMediaConduitSessionNotInited;
   272   }
   274   if( !(mPtrViENetwork = ViENetwork::GetInterface(mVideoEngine)))
   275   {
   276     CSFLogError(logTag, "%s Unable to get video network interface ", __FUNCTION__);
   277     return kMediaConduitSessionNotInited;
   278   }
   280   if( !(mPtrViERender = ViERender::GetInterface(mVideoEngine)))
   281   {
   282     CSFLogError(logTag, "%s Unable to get video render interface ", __FUNCTION__);
   283     return kMediaConduitSessionNotInited;
   284   }
   286   if( !(mPtrRTP = webrtc::ViERTP_RTCP::GetInterface(mVideoEngine)))
   287   {
   288     CSFLogError(logTag, "%s Unable to get video RTCP interface ", __FUNCTION__);
   289     return kMediaConduitSessionNotInited;
   290   }
   292   if ( !(mPtrExtCodec = webrtc::ViEExternalCodec::GetInterface(mVideoEngine)))
   293   {
   294     CSFLogError(logTag, "%s Unable to get external codec interface %d ",
   295                 __FUNCTION__, mPtrViEBase->LastError());
   296     return kMediaConduitSessionNotInited;
   297   }
   299   if (other) {
   300     mChannel = other->mChannel;
   301     mPtrExtCapture = other->mPtrExtCapture;
   302     mCapId = other->mCapId;
   303   } else {
   304     CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
   306     if(mPtrViEBase->Init() == -1)
   307     {
   308       CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
   309                   mPtrViEBase->LastError());
   310       return kMediaConduitSessionNotInited;
   311     }
   313     if(mPtrViEBase->CreateChannel(mChannel) == -1)
   314     {
   315       CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
   316                   mPtrViEBase->LastError());
   317       return kMediaConduitChannelError;
   318     }
   320     if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
   321     {
   322       CSFLogError(logTag,  "%s ViENetwork Failed %d ", __FUNCTION__,
   323                   mPtrViEBase->LastError());
   324       return kMediaConduitTransportRegistrationFail;
   325     }
   327     if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
   328                                                      mPtrExtCapture) == -1)
   329     {
   330       CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
   331                   __FUNCTION__, mPtrViEBase->LastError());
   332       return kMediaConduitCaptureError;
   333     }
   335     if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
   336     {
   337       CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
   338                   __FUNCTION__,mPtrViEBase->LastError());
   339       return kMediaConduitCaptureError;
   340     }
   342     if(mPtrViERender->AddRenderer(mChannel,
   343                                   webrtc::kVideoI420,
   344                                   (webrtc::ExternalRenderer*) this) == -1)
   345     {
   346       CSFLogError(logTag, "%s Failed to add external renderer ", __FUNCTION__);
   347       return kMediaConduitInvalidRenderer;
   348     }
   349     // Set up some parameters, per juberti. Set MTU.
   350     if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
   351     {
   352       CSFLogError(logTag,  "%s MTU Failed %d ", __FUNCTION__,
   353                   mPtrViEBase->LastError());
   354       return kMediaConduitMTUError;
   355     }
   356     // Turn on RTCP and loss feedback reporting.
   357     if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
   358     {
   359       CSFLogError(logTag,  "%s RTCPStatus Failed %d ", __FUNCTION__,
   360                   mPtrViEBase->LastError());
   361       return kMediaConduitRTCPStatusError;
   362     }
   363   }
   365   CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
   366   return kMediaConduitNoError;
   367 }
   369 void
   370 WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
   371 {
   372   CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
   374   // SyncTo(value) syncs to the AudioConduit, and if already synced replaces
   375   // the current sync target.  SyncTo(nullptr) cancels any existing sync and
   376   // releases the strong ref to AudioConduit.
   377   if (aConduit) {
   378     mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
   379     mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
   380     // NOTE: this means the VideoConduit will keep the AudioConduit alive!
   381   } else if ((mOtherDirection && mOtherDirection->mSyncedTo) || mSyncedTo) {
   382     mPtrViEBase->DisconnectAudioChannel(mChannel);
   383     mPtrViEBase->SetVoiceEngine(nullptr);
   384   }
   386   // Now manage the shared sync reference (ugly)
   387   if (mSyncedTo || !mOtherDirection ) {
   388     mSyncedTo = aConduit;
   389   } else {
   390     mOtherDirection->mSyncedTo = aConduit;
   391   }
   392 }
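       // Illustrative usage from a caller that owns both conduits (variable
       // names are hypothetical, not from this file):
       //   videoConduit->SyncTo(audioConduit);  // start A/V sync; keeps the AudioConduit alive
       //   videoConduit->SyncTo(nullptr);       // cancel sync and release that strong ref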
   394 MediaConduitErrorCode
   395 WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer)
   396 {
   397   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   399   //null renderer
   400   if(!aVideoRenderer)
   401   {
   402     CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__);
   403     MOZ_ASSERT(PR_FALSE);
   404     return kMediaConduitInvalidRenderer;
   405   }
   407   //Start Rendering if we haven't already
   408   if(!mRenderer)
   409   {
   410     mRenderer = aVideoRenderer; // must be done before StartRender()
   412     if(mPtrViERender->StartRender(mChannel) == -1)
   413     {
   414       CSFLogError(logTag, "%s Starting the Renderer Failed %d ", __FUNCTION__,
   415                                                       mPtrViEBase->LastError());
   416       mRenderer = nullptr;
   417       return kMediaConduitRendererFail;
   418     }
   419   } else {
   420     //Assign the new renderer - overwrites if there is already one
   421     mRenderer = aVideoRenderer;
   422   }
   424   return kMediaConduitNoError;
   425 }
   427 void
   428 WebrtcVideoConduit::DetachRenderer()
   429 {
   430   if(mRenderer)
   431   {
   432     mPtrViERender->StopRender(mChannel);
   433     mRenderer = nullptr;
   434   }
   435 }
   437 MediaConduitErrorCode
   438 WebrtcVideoConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport)
   439 {
   440   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   441   if(!aTransport)
   442   {
   443     CSFLogError(logTag, "%s NULL Transport", __FUNCTION__);
   444     return kMediaConduitInvalidTransport;
   445   }
   446   // set the transport
   447   mTransport = aTransport;
   448   return kMediaConduitNoError;
   449 }
   451 /**
   452  * Note: Setting the send-codec on the Video Engine will restart the encoder,
   453  * set up a new SSRC, and reset the RTP_RTCP module with the new codec setting.
   454  */
   455 MediaConduitErrorCode
   456 WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
   457 {
   458   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   459   bool codecFound = false;
   460   MediaConduitErrorCode condError = kMediaConduitNoError;
   461   int error = 0; //webrtc engine errors
   462   webrtc::VideoCodec  video_codec;
   463   std::string payloadName;
   465   //validate basic params
   466   if((condError = ValidateCodecConfig(codecConfig,true)) != kMediaConduitNoError)
   467   {
   468     return condError;
   469   }
   471   //Check if we have same codec already applied
   472   if(CheckCodecsForMatch(mCurSendCodecConfig, codecConfig))
   473   {
   474     CSFLogDebug(logTag,  "%s Codec has been applied already ", __FUNCTION__);
   475     return kMediaConduitCodecInUse;
   476   }
   478   //transmitting already ?
   479   if(mEngineTransmitting)
   480   {
   481     CSFLogDebug(logTag, "%s Engine Already Sending. Attempting to Stop ", __FUNCTION__);
   482     if(mPtrViEBase->StopSend(mChannel) == -1)
   483     {
   484       CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__,
   485                   mPtrViEBase->LastError());
   486       return kMediaConduitUnknownError;
   487     }
   488   }
   490   mEngineTransmitting = false;
   492   if (codecConfig->mLoadManager) {
   493     mPtrViEBase->RegisterCpuOveruseObserver(mChannel, codecConfig->mLoadManager);
   494     mPtrViEBase->SetLoadManager(codecConfig->mLoadManager);
   495   }
   497   // we should be good here to set the new codec.
   498   for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
   499   {
   500     if(0 == mPtrViECodec->GetCodec(idx, video_codec))
   501     {
   502       payloadName = video_codec.plName;
   503       if(codecConfig->mName.compare(payloadName) == 0)
   504       {
   505         CodecConfigToWebRTCCodec(codecConfig, video_codec);
   506         codecFound = true;
   507         break;
   508       }
   509     }
   510   }//for
   512   if(codecFound == false)
   513   {
   514     CSFLogError(logTag, "%s Codec Mismatch ", __FUNCTION__);
   515     return kMediaConduitInvalidSendCodec;
   516   }
   518   if(mPtrViECodec->SetSendCodec(mChannel, video_codec) == -1)
   519   {
   520     error = mPtrViEBase->LastError();
   521     if(error == kViECodecInvalidCodec)
   522     {
   523       CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__);
   524       return kMediaConduitInvalidSendCodec;
   525     }
   526     CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__,
   527                 mPtrViEBase->LastError());
   528     return kMediaConduitUnknownError;
   529   }
   530   mSendingWidth = 0;
   531   mSendingHeight = 0;
   533   if(codecConfig->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC)) {
   534     CSFLogDebug(logTag, "Enabling NACK (send) for video stream\n");
   535     if (mPtrRTP->SetNACKStatus(mChannel, true) != 0)
   536     {
   537       CSFLogError(logTag,  "%s NACKStatus Failed %d ", __FUNCTION__,
   538                   mPtrViEBase->LastError());
   539       return kMediaConduitNACKStatusError;
   540     }
   541   }
   543   if(mPtrViEBase->StartSend(mChannel) == -1)
   544   {
   545     CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__,
   546                 mPtrViEBase->LastError());
   547     return kMediaConduitUnknownError;
   548   }
   550   //Copy the applied config for future reference.
   551   delete mCurSendCodecConfig;
   553   mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
   555   mPtrRTP->SetRembStatus(mChannel, true, false);
   557   // by now we should have successfully started the transmission
   558   mEngineTransmitting = true;
   559   return kMediaConduitNoError;
   560 }
   562 MediaConduitErrorCode
   563 WebrtcVideoConduit::ConfigureRecvMediaCodecs(
   564     const std::vector<VideoCodecConfig* >& codecConfigList)
   565 {
   566   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   567   MediaConduitErrorCode condError = kMediaConduitNoError;
   568   int error = 0; //webrtc engine errors
   569   bool success = false;
   570   std::string  payloadName;
   572   // are we receiving already? If so, stop receiving and playout
   573   // since we can't apply a new recv codec while the engine is playing.
   574   if(mEngineReceiving)
   575   {
   576     CSFLogDebug(logTag, "%s Engine Already Receiving. Attempting to Stop ", __FUNCTION__);
   577     if(mPtrViEBase->StopReceive(mChannel) == -1)
   578     {
   579       error = mPtrViEBase->LastError();
   580       if(error == kViEBaseUnknownError)
   581       {
   582         CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__);
   583         mEngineReceiving = false;
   584       } else {
   585         CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__,
   586                     mPtrViEBase->LastError());
   587         return kMediaConduitUnknownError;
   588       }
   589     }
   590   }
   592   mEngineReceiving = false;
   594   if(codecConfigList.empty())
   595   {
   596     CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
   597     return kMediaConduitMalformedArgument;
   598   }
   600   webrtc::ViEKeyFrameRequestMethod kf_request = webrtc::kViEKeyFrameRequestNone;
   601   bool use_nack_basic = false;
   603   //Try Applying the codecs in the list
   604   // we treat it as success if at least one codec was applied and reception was
   605   // started successfully.
   606   for(std::vector<VideoCodecConfig*>::size_type i=0;i < codecConfigList.size();i++)
   607   {
   608     // if the codec param is invalid or duplicate, return an error
   609     if((condError = ValidateCodecConfig(codecConfigList[i],false)) != kMediaConduitNoError)
   610     {
   611       return condError;
   612     }
   614     // Check for the keyframe request type: PLI is preferred
   615     // over FIR, and FIR is preferred over none.
   616     if (codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_PLI))
   617     {
   618       kf_request = webrtc::kViEKeyFrameRequestPliRtcp;
   619     } else if(kf_request == webrtc::kViEKeyFrameRequestNone &&
   620               codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_CCM_FIR))
   621     {
   622       kf_request = webrtc::kViEKeyFrameRequestFirRtcp;
   623     }
   625     // Check whether NACK is requested
   626     if(codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC))
   627     {
   628       use_nack_basic = true;
   629     }
   631     webrtc::VideoCodec  video_codec;
   633     mEngineReceiving = false;
   634     memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
   635     //Retrieve pre-populated codec structure for our codec.
   636     for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
   637     {
   638       if(mPtrViECodec->GetCodec(idx, video_codec) == 0)
   639       {
   640         payloadName = video_codec.plName;
   641         if(codecConfigList[i]->mName.compare(payloadName) == 0)
   642         {
   643           CodecConfigToWebRTCCodec(codecConfigList[i], video_codec);
   644           if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1)
   645           {
   646             CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__,
   647                         mPtrViEBase->LastError());
   648           } else {
   649             CSFLogError(logTag, "%s Successfully Set the codec %s", __FUNCTION__,
   650                         codecConfigList[i]->mName.c_str());
   651             if(CopyCodecToDB(codecConfigList[i]))
   652             {
   653               success = true;
   654             } else {
   655               CSFLogError(logTag,"%s Unable to update Codec Database", __FUNCTION__);
   656               return kMediaConduitUnknownError;
   657             }
   658           }
   659           break; //we found a match
   660         }
   661       }
   662     }//end for codeclist
   664   }//end for
   666   if(!success)
   667   {
   668     CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__);
   669     return kMediaConduitInvalidReceiveCodec;
   670   }
   672   // XXX Currently, we gather up all of the feedback types that the remote
   673   // party indicated it supports for all video codecs and configure the entire
   674   // conduit based on those capabilities. This is technically out of spec,
   675   // as these values should be configured on a per-codec basis. However,
   676   // the video engine only provides this API on a per-conduit basis, so that's
   677   // how we have to do it. The approach of considering the remote capabilities
   678   // for the entire conduit to be a union of all remote codec capabilities
   679   // (rather than the more conservative approach of using an intersection)
   680   // is made to provide as many feedback mechanisms as are likely to be
   681   // processed by the remote party (and should be relatively safe, since the
   682   // remote party is required to ignore feedback types that it does not
   683   // understand).
   684   //
   685   // Note that our configuration uses this union of remote capabilities as
   686   // input to the configuration. It is not isomorphic to the configuration.
   687   // For example, it only makes sense to have one frame request mechanism
   688   // active at a time; so, if the remote party indicates more than one
   689   // supported mechanism, we're only configuring the one we most prefer.
   690   //
   691   // See http://code.google.com/p/webrtc/issues/detail?id=2331
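         // As a concrete example of the union described above: if one codec in
         // the remote offer advertises "nack pli" while a different codec only
         // advertises "ccm fir", the conduit is configured with PLI (the
         // preferred key-frame request method), and NACK is enabled for the
         // whole conduit as soon as any single codec lists plain "nack".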
   693   if (kf_request != webrtc::kViEKeyFrameRequestNone)
   694   {
   695     CSFLogDebug(logTag, "Enabling %s frame requests for video stream\n",
   696                 (kf_request == webrtc::kViEKeyFrameRequestPliRtcp ?
   697                  "PLI" : "FIR"));
   698     if(mPtrRTP->SetKeyFrameRequestMethod(mChannel, kf_request) != 0)
   699     {
   700       CSFLogError(logTag,  "%s KeyFrameRequest Failed %d ", __FUNCTION__,
   701                   mPtrViEBase->LastError());
   702       return kMediaConduitKeyFrameRequestError;
   703     }
   704   }
   706   switch (kf_request) {
   707     case webrtc::kViEKeyFrameRequestNone:
   708       mFrameRequestMethod = FrameRequestNone;
   709       break;
   710     case webrtc::kViEKeyFrameRequestPliRtcp:
   711       mFrameRequestMethod = FrameRequestPli;
   712       break;
   713     case webrtc::kViEKeyFrameRequestFirRtcp:
   714       mFrameRequestMethod = FrameRequestFir;
   715       break;
   716     default:
   717       MOZ_ASSERT(PR_FALSE);
   718       mFrameRequestMethod = FrameRequestUnknown;
   719   }
   721   if(use_nack_basic)
   722   {
   723     CSFLogDebug(logTag, "Enabling NACK (recv) for video stream\n");
   724     if (mPtrRTP->SetNACKStatus(mChannel, true) != 0)
   725     {
   726       CSFLogError(logTag,  "%s NACKStatus Failed %d ", __FUNCTION__,
   727                   mPtrViEBase->LastError());
   728       return kMediaConduitNACKStatusError;
   729     }
   730   }
   731   mUsingNackBasic = use_nack_basic;
   733   //Start Receive on the video engine
   734   if(mPtrViEBase->StartReceive(mChannel) == -1)
   735   {
   736     error = mPtrViEBase->LastError();
   737     CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error);
   740     return kMediaConduitUnknownError;
   741   }
   743 #ifdef MOZILLA_INTERNAL_API
   744   if (NS_IsMainThread()) {
   745     nsresult rv;
   746     nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
   747     if (NS_SUCCEEDED(rv)) {
   748       nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
   750       if (branch) {
   751 	branch->GetBoolPref("media.video.test_latency", &mVideoLatencyTestEnable);
   752       }
   753     }
   754   }
   755 #endif
   757   // by now we should have successfully started the reception
   758   mPtrRTP->SetRembStatus(mChannel, false, true);
   759   mEngineReceiving = true;
   760   DumpCodecDB();
   761   return kMediaConduitNoError;
   762 }
   764 // XXX we need to figure out how to feed back changes in preferred capture
   765 // resolution to the getUserMedia source
   766 bool
   767 WebrtcVideoConduit::SelectSendResolution(unsigned short width,
   768                                          unsigned short height)
   769 {
   770   // XXX This will do bandwidth-resolution adaptation as well - bug 877954
   772   // Limit resolution to max-fs while keeping same aspect ratio as the
   773   // incoming image.
   774   if (mCurSendCodecConfig && mCurSendCodecConfig->mMaxFrameSize)
   775   {
   776     unsigned int cur_fs, max_width, max_height, mb_width, mb_height, mb_max;
   778     mb_width = (width + 15) >> 4;
   779     mb_height = (height + 15) >> 4;
   781     cur_fs = mb_width * mb_height;
   783     // Limit resolution to max_fs, but don't scale up.
   784     if (cur_fs > mCurSendCodecConfig->mMaxFrameSize)
   785     {
   786       double scale_ratio;
   788       scale_ratio = sqrt((double) mCurSendCodecConfig->mMaxFrameSize /
   789                          (double) cur_fs);
   791       mb_width = mb_width * scale_ratio;
   792       mb_height = mb_height * scale_ratio;
   794       // Adjust mb_width and mb_height if they were truncated to zero.
   795       if (mb_width == 0) {
   796         mb_width = 1;
   797         mb_height = std::min(mb_height, mCurSendCodecConfig->mMaxFrameSize);
   798       }
   799       if (mb_height == 0) {
   800         mb_height = 1;
   801         mb_width = std::min(mb_width, mCurSendCodecConfig->mMaxFrameSize);
   802       }
   803     }
   805     // Limit width/height separately to limit the effect of extreme aspect ratios.
   806     mb_max = (unsigned) sqrt(8 * (double) mCurSendCodecConfig->mMaxFrameSize);
   808     max_width = 16 * std::min(mb_width, mb_max);
   809     max_height = 16 * std::min(mb_height, mb_max);
   811     if (width * max_height > max_width * height)
   812     {
   813       if (width > max_width)
   814       {
   815         // Because the value is truncated to an integer here and forced to an even
   816         // value later, add 1 to improve accuracy.
   817         height = max_width * height / width + 1;
   818         width = max_width;
   819       }
   820     }
   821     else
   822     {
   823       if (height > max_height)
   824       {
   825         // Because the value is truncated to an integer here and forced to an even
   826         // value later, add 1 to improve accuracy.
   827         width = max_height * width / height + 1;
   828         height = max_height;
   829       }
   830     }
   832     // Favor even multiples of pixels for width and height.
   833     width = std::max(width & ~1, 2);
   834     height = std::max(height & ~1, 2);
   835   }
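         // Worked example of the max-fs limiting above (numbers illustrative,
         // not from any particular SDP): a 1280x720 input with mMaxFrameSize of
         // 900 macroblocks gives cur_fs = 80*45 = 3600 and scale_ratio =
         // sqrt(900/3600) = 0.5, so mb_width/mb_height become 40 and 22;
         // mb_max = sqrt(8*900) ~= 84, hence max_width = 640 and max_height = 352,
         // and the aspect-ratio branch yields a send resolution of 626x352
         // (40*22 = 880 macroblocks, within the 900 limit).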
   837   // Adapt to getUserMedia resolution changes
   838   // check if we need to reconfigure the sending resolution
   839   if (mSendingWidth != width || mSendingHeight != height)
   840   {
   841     // This will avoid us continually retrying this operation if it fails.
   842     // If the resolution changes, we'll try again.  In the meantime, we'll
   843     // keep using the old size in the encoder.
   844     mSendingWidth = width;
   845     mSendingHeight = height;
   847     // Get current vie codec.
   848     webrtc::VideoCodec vie_codec;
   849     int32_t err;
   851     if ((err = mPtrViECodec->GetSendCodec(mChannel, vie_codec)) != 0)
   852     {
   853       CSFLogError(logTag, "%s: GetSendCodec failed, err %d", __FUNCTION__, err);
   854       return false;
   855     }
   856     if (vie_codec.width != width || vie_codec.height != height)
   857     {
   858       vie_codec.width = width;
   859       vie_codec.height = height;
   861       if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
   862       {
   863         CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
   864                     __FUNCTION__, width, height, err);
   865         return false;
   866       }
   867       CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u",
   868                   __FUNCTION__, width, height);
   869     } // else no change; mSendingWidth likely was 0
   870   }
   871   return true;
   872 }
   874 MediaConduitErrorCode
   875 WebrtcVideoConduit::SetExternalSendCodec(int pltype,
   876                                          VideoEncoder* encoder) {
   877   int ret = mPtrExtCodec->RegisterExternalSendCodec(mChannel,
   878                                                     pltype,
   879                                                     static_cast<WebrtcVideoEncoder*>(encoder),
   880                                                     false);
   881   return ret ? kMediaConduitInvalidSendCodec : kMediaConduitNoError;
   882 }
   884 MediaConduitErrorCode
   885 WebrtcVideoConduit::SetExternalRecvCodec(int pltype,
   886                                          VideoDecoder* decoder) {
   887   int ret = mPtrExtCodec->RegisterExternalReceiveCodec(mChannel,
   888                                                        pltype,
   889                                                        static_cast<WebrtcVideoDecoder*>(decoder));
   890   return ret ? kMediaConduitInvalidReceiveCodec : kMediaConduitNoError;
   891 }
   893 MediaConduitErrorCode
   894 WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
   895                                    unsigned int video_frame_length,
   896                                    unsigned short width,
   897                                    unsigned short height,
   898                                    VideoType video_type,
   899                                    uint64_t capture_time)
   900 {
   901   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   903   // sanity-check the parameters
   904   if(!video_frame || video_frame_length == 0 ||
   905      width == 0 || height == 0)
   906   {
   907     CSFLogError(logTag,  "%s Invalid Parameters ",__FUNCTION__);
   908     MOZ_ASSERT(PR_FALSE);
   909     return kMediaConduitMalformedArgument;
   910   }
   912   webrtc::RawVideoType type;
   913   switch (video_type) {
   914     case kVideoI420:
   915       type = webrtc::kVideoI420;
   916       break;
   917     case kVideoNV21:
   918       type = webrtc::kVideoNV21;
   919       break;
   920     default:
   921       CSFLogError(logTag,  "%s VideoType Invalid. Only I420 and NV21 Supported",__FUNCTION__);
   922       MOZ_ASSERT(PR_FALSE);
   923       return kMediaConduitMalformedArgument;
   924   }
   925   //Transmission should be enabled before we insert any frames.
   926   if(!mEngineTransmitting)
   927   {
   928     CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
   929     return kMediaConduitSessionNotInited;
   930   }
   932   // enforce even width/height (paranoia)
   933   MOZ_ASSERT(!(width & 1));
   934   MOZ_ASSERT(!(height & 1));
   936   if (!SelectSendResolution(width, height))
   937   {
   938     return kMediaConduitCaptureError;
   939   }
   941   // insert the frame into the video engine in I420 format only
   942   MOZ_ASSERT(mPtrExtCapture);
   943   if(mPtrExtCapture->IncomingFrame(video_frame,
   944                                    video_frame_length,
   945                                    width, height,
   946                                    type,
   947                                    (unsigned long long)capture_time) == -1)
   948   {
   949     CSFLogError(logTag,  "%s IncomingFrame Failed %d ", __FUNCTION__,
   950                                             mPtrViEBase->LastError());
   951     return kMediaConduitCaptureError;
   952   }
   954   CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
   955   return kMediaConduitNoError;
   956 }
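       // Illustrative call site (variable names hypothetical, not from this tree):
       //   conduit->SendVideoFrame(i420Buf, i420Len, 640, 480, kVideoI420, captureTime);
       // The buffer must already hold even-dimensioned I420 (or NV21) data; the
       // conduit adjusts the encoder via SelectSendResolution() and then hands the
       // frame to the external capture device with IncomingFrame().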
   958 // Transport Layer Callbacks
   959 MediaConduitErrorCode
   960 WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
   961 {
   962   CSFLogDebug(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len);
   964   // Media Engine should be receiving already.
   965   if(mEngineReceiving)
   966   {
   967     // let the engine know of an RTP packet to decode
   968     if(mPtrViENetwork->ReceivedRTPPacket(mChannel,data,len) == -1)
   969     {
   970       int error = mPtrViEBase->LastError();
   971       CSFLogError(logTag, "%s RTP Processing Failed %d ", __FUNCTION__, error);
   972       if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled)
   973       {
   974         return kMediaConduitRTPProcessingFailed;
   975       }
   976       return kMediaConduitRTPRTCPModuleError;
   977     }
   978   } else {
   979     CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
   980     return kMediaConduitSessionNotInited;
   981   }
   983   return kMediaConduitNoError;
   984 }
   986 MediaConduitErrorCode
   987 WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
   988 {
   989   CSFLogDebug(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len);
   991   //Media Engine should be receiving already
   992   if(mEngineTransmitting)
   993   {
   994     if(mPtrViENetwork->ReceivedRTCPPacket(mChannel,data,len) == -1)
   995     {
   996       int error = mPtrViEBase->LastError();
   997       CSFLogError(logTag, "%s RTP Processing Failed %d", __FUNCTION__, error);
   998       if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled)
   999       {
  1000         return kMediaConduitRTPProcessingFailed;
  1001       }
  1002       return kMediaConduitRTPRTCPModuleError;
  1003     }
  1004   } else {
  1005     CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
  1006     return kMediaConduitSessionNotInited;
  1007   }
  1008   return kMediaConduitNoError;
  1009 }
  1011 //WebRTC::RTP Callback Implementation
  1012 int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len)
  1013 {
  1014   CSFLogDebug(logTag,  "%s : channel %d len %d %s", __FUNCTION__, channel, len,
  1015               (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");
  1017   if (mEngineReceiving)
  1018   {
  1019     if (mOtherDirection)
  1020     {
  1021       return mOtherDirection->SendPacket(channel, data, len);
  1022     }
  1023     CSFLogDebug(logTag,  "%s : Asked to send RTP without an RTP sender on channel %d",
  1024                 __FUNCTION__, channel);
  1025     return -1;
  1026   } else {
  1027     if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
  1028     {
  1029       CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
  1030       return len;
  1031     } else {
  1032       CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
  1033       return -1;
  1034     }
  1035   }
  1036 }
  1038 int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len)
  1039 {
  1040   CSFLogDebug(logTag,  "%s : channel %d , len %d ", __FUNCTION__, channel,len);
  1042   if (mEngineTransmitting)
  1043   {
  1044     if (mOtherDirection)
  1045     {
  1046       return mOtherDirection->SendRTCPPacket(channel, data, len);
  1047     }
  1048   }
  1050   // We come here if we have only one pipeline/conduit setup,
  1051   // such as for unidirectional streams.
  1052   // We also end up here if we are receiving
  1053   if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
  1054   {
  1055     CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
  1056     return len;
  1057   } else {
  1058     CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
  1059     return -1;
  1060   }
  1061 }
  1063 // WebRTC::ExternalMedia Implementation
  1064 int
  1065 WebrtcVideoConduit::FrameSizeChange(unsigned int width,
  1066                                     unsigned int height,
  1067                                     unsigned int numStreams)
  1068 {
  1069   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
  1072   mReceivingWidth = width;
  1073   mReceivingHeight = height;
  1075   if(mRenderer)
  1076   {
  1077     mRenderer->FrameSizeChange(width, height, numStreams);
  1078     return 0;
  1079   }
  1081   CSFLogError(logTag,  "%s Renderer is NULL ", __FUNCTION__);
  1082   return -1;
  1083 }
  1085 int
  1086 WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
  1087                                  int buffer_size,
  1088                                  uint32_t time_stamp,
  1089                                  int64_t render_time,
  1090                                  void *handle)
  1091 {
  1092   CSFLogDebug(logTag,  "%s Buffer Size %d", __FUNCTION__, buffer_size);
  1094   if(mRenderer)
  1095   {
  1096     layers::Image* img = nullptr;
  1097     // |handle| should be a webrtc::NativeHandle if available.
  1098     if (handle) {
  1099       webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(handle);
  1100       // In the handle, there should be a layers::Image.
  1101       img = static_cast<layers::Image*>(native_h->GetHandle());
  1102     }
  1104     if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
  1105       uint64_t now = PR_Now();
  1106       uint64_t timestamp = 0;
  1107       bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
  1108 				   buffer,
  1109 				   reinterpret_cast<unsigned char*>(&timestamp),
  1110 				   sizeof(timestamp), 0, 0);
  1111       if (ok) {
  1112 	VideoLatencyUpdate(now - timestamp);
  1113       }
  1114     }
  1116     const ImageHandle img_h(img);
  1117     mRenderer->RenderVideoFrame(buffer, buffer_size, time_stamp, render_time,
  1118                                 img_h);
  1119     return 0;
  1120   }
  1122   CSFLogError(logTag,  "%s Renderer is NULL  ", __FUNCTION__);
  1123   return -1;
  1124 }
  1126 /**
  1127  * Apply the codec config passed in to the engine's webrtc::VideoCodec structure
  1128  */
  1130 void
  1131 WebrtcVideoConduit::CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo,
  1132                                               webrtc::VideoCodec& cinst)
  1133 {
  1134   cinst.plType  = codecInfo->mType;
  1135   // leave width/height alone; they'll be overridden on the first frame
  1136   if (codecInfo->mMaxFrameRate > 0)
  1137   {
  1138     cinst.maxFramerate = codecInfo->mMaxFrameRate;
  1139   }
  1140   cinst.minBitrate = 200;
  1141   cinst.startBitrate = 300;
  1142   cinst.maxBitrate = 2000;
  1143 }
  1145 //Copy the codec passed into Conduit's database
  1146 bool
  1147 WebrtcVideoConduit::CopyCodecToDB(const VideoCodecConfig* codecInfo)
  1148 {
  1149   VideoCodecConfig* cdcConfig = new VideoCodecConfig(*codecInfo);
  1150   mRecvCodecList.push_back(cdcConfig);
  1151   return true;
  1152 }
  1154 bool
  1155 WebrtcVideoConduit::CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig,
  1156                                         const VideoCodecConfig* codecInfo) const
  1157 {
  1158   if(!curCodecConfig)
  1159   {
  1160     return false;
  1161   }
  1163   if(curCodecConfig->mType  == codecInfo->mType &&
  1164      curCodecConfig->mName.compare(codecInfo->mName) == 0 &&
  1165      curCodecConfig->mMaxFrameSize == codecInfo->mMaxFrameSize &&
  1166      curCodecConfig->mMaxFrameRate == codecInfo->mMaxFrameRate)
  1167   {
  1168     return true;
  1169   }
  1171   return false;
  1172 }
  1174 /**
  1175  * Checks if the codec is already in Conduit's database
  1176  */
  1177 bool
  1178 WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const
  1179 {
  1180   // the db should have at least one codec
  1181   for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
  1182   {
  1183     if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo))
  1184     {
  1185       //match
  1186       return true;
  1187     }
  1188   }
  1189   //no match or empty local db
  1190   return false;
  1191 }
  1193 /**
  1194  * Perform validation on the codecConfig to be applied
  1195  * Verifies if the codec is already applied.
  1196  */
  1197 MediaConduitErrorCode
  1198 WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo,
  1199                                         bool send) const
  1200 {
  1201   bool codecAppliedAlready = false;
  1203   if(!codecInfo)
  1204   {
  1205     CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
  1206     return kMediaConduitMalformedArgument;
  1207   }
  1209   if((codecInfo->mName.empty()) ||
  1210      (codecInfo->mName.length() >= CODEC_PLNAME_SIZE))
  1211   {
  1212     CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
  1213     return kMediaConduitMalformedArgument;
  1214   }
  1216   //check if we have the same codec already applied
  1217   if(send)
  1218   {
  1219     codecAppliedAlready = CheckCodecsForMatch(mCurSendCodecConfig,codecInfo);
  1220   } else {
  1221     codecAppliedAlready = CheckCodecForMatch(codecInfo);
  1222   }
  1224   if(codecAppliedAlready)
  1225   {
  1226     CSFLogDebug(logTag, "%s Codec %s Already Applied  ", __FUNCTION__, codecInfo->mName.c_str());
  1227     return kMediaConduitCodecInUse;
  1228   }
  1229   return kMediaConduitNoError;
  1230 }
  1232 void
  1233 WebrtcVideoConduit::DumpCodecDB() const
  1234 {
  1235   for(std::vector<VideoCodecConfig*>::size_type i=0;i<mRecvCodecList.size();i++)
  1236   {
  1237     CSFLogDebug(logTag,"Payload Name: %s", mRecvCodecList[i]->mName.c_str());
  1238     CSFLogDebug(logTag,"Payload Type: %d", mRecvCodecList[i]->mType);
  1239     CSFLogDebug(logTag,"Payload Max Frame Size: %d", mRecvCodecList[i]->mMaxFrameSize);
  1240     CSFLogDebug(logTag,"Payload Max Frame Rate: %d", mRecvCodecList[i]->mMaxFrameRate);
  1241   }
  1242 }
  1244 void
  1245 WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
  1246 {
  1247   mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
  1248 }
  1250 uint64_t
  1251 WebrtcVideoConduit::MozVideoLatencyAvg()
  1252 {
  1253   return mVideoLatencyAvg / sRoundingPadding;
  1254 }
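       // Taken together, the two functions above implement a fixed-point
       // exponential moving average: mVideoLatencyAvg holds the running average
       // scaled by sRoundingPadding, each update keeps a weight of
       // sAlphaNum/sAlphaDen on the previous value, and MozVideoLatencyAvg()
       // strips the scale factor when reporting.  (The constants themselves are
       // defined elsewhere, presumably in the conduit headers.)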
  1256 }// end namespace
