1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp Wed Dec 31 06:09:35 2014 +0100 1.3 @@ -0,0 +1,1256 @@ 1.4 +/* This Source Code Form is subject to the terms of the Mozilla Public 1.5 + * License, v. 2.0. If a copy of the MPL was not distributed with this file, 1.6 + * You can obtain one at http://mozilla.org/MPL/2.0/. */ 1.7 + 1.8 +#include "CSFLog.h" 1.9 +#include "nspr.h" 1.10 + 1.11 +// For rtcp-fb constants 1.12 +#include "ccsdp.h" 1.13 + 1.14 +#include "VideoConduit.h" 1.15 +#include "AudioConduit.h" 1.16 +#include "nsThreadUtils.h" 1.17 +#include "LoadManager.h" 1.18 +#include "YuvStamper.h" 1.19 +#include "nsServiceManagerUtils.h" 1.20 +#include "nsIPrefService.h" 1.21 +#include "nsIPrefBranch.h" 1.22 + 1.23 +#include "webrtc/common_video/interface/native_handle.h" 1.24 +#include "webrtc/video_engine/include/vie_errors.h" 1.25 + 1.26 +#ifdef MOZ_WIDGET_ANDROID 1.27 +#include "AndroidJNIWrapper.h" 1.28 +#endif 1.29 + 1.30 +#include <algorithm> 1.31 +#include <math.h> 1.32 + 1.33 +namespace mozilla { 1.34 + 1.35 +static const char* logTag ="WebrtcVideoSessionConduit"; 1.36 + 1.37 +// 32 bytes is what WebRTC CodecInst expects 1.38 +const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32; 1.39 + 1.40 +/** 1.41 + * Factory Method for VideoConduit 1.42 + */ 1.43 +mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create(VideoSessionConduit *aOther) 1.44 +{ 1.45 +#ifdef MOZILLA_INTERNAL_API 1.46 + // unit tests create their own "main thread" 1.47 + NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); 1.48 +#endif 1.49 + CSFLogDebug(logTag, "%s ", __FUNCTION__); 1.50 + 1.51 + WebrtcVideoConduit* obj = new WebrtcVideoConduit(); 1.52 + if(obj->Init(static_cast<WebrtcVideoConduit*>(aOther)) != kMediaConduitNoError) 1.53 + { 1.54 + CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__); 1.55 + delete obj; 1.56 + return nullptr; 1.57 + } 1.58 + CSFLogDebug(logTag, "%s Successfully created VideoConduit ", __FUNCTION__); 1.59 + return obj; 1.60 +} 1.61 + 1.62 +WebrtcVideoConduit::~WebrtcVideoConduit() 1.63 +{ 1.64 +#ifdef MOZILLA_INTERNAL_API 1.65 + // unit tests create their own "main thread" 1.66 + NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); 1.67 +#endif 1.68 + CSFLogDebug(logTag, "%s ", __FUNCTION__); 1.69 + 1.70 + for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++) 1.71 + { 1.72 + delete mRecvCodecList[i]; 1.73 + } 1.74 + 1.75 + delete mCurSendCodecConfig; 1.76 + 1.77 + // The first one of a pair to be deleted shuts down media for both 1.78 + //Deal with External Capturer 1.79 + if(mPtrViECapture) 1.80 + { 1.81 + if (!mShutDown) { 1.82 + mPtrViECapture->DisconnectCaptureDevice(mCapId); 1.83 + mPtrViECapture->ReleaseCaptureDevice(mCapId); 1.84 + mPtrExtCapture = nullptr; 1.85 + if (mOtherDirection) 1.86 + mOtherDirection->mPtrExtCapture = nullptr; 1.87 + } 1.88 + } 1.89 + 1.90 + //Deal with External Renderer 1.91 + if(mPtrViERender) 1.92 + { 1.93 + if (!mShutDown) { 1.94 + if(mRenderer) { 1.95 + mPtrViERender->StopRender(mChannel); 1.96 + } 1.97 + mPtrViERender->RemoveRenderer(mChannel); 1.98 + } 1.99 + } 1.100 + 1.101 + //Deal with the transport 1.102 + if(mPtrViENetwork) 1.103 + { 1.104 + if (!mShutDown) { 1.105 + mPtrViENetwork->DeregisterSendTransport(mChannel); 1.106 + } 1.107 + } 1.108 + 1.109 + if(mPtrViEBase) 1.110 + { 1.111 + if (!mShutDown) { 1.112 + mPtrViEBase->StopSend(mChannel); 1.113 + mPtrViEBase->StopReceive(mChannel); 1.114 + 
SyncTo(nullptr); 1.115 + mPtrViEBase->DeleteChannel(mChannel); 1.116 + } 1.117 + } 1.118 + 1.119 + if (mOtherDirection) 1.120 + { 1.121 + // mOtherDirection owns these now! 1.122 + mOtherDirection->mOtherDirection = nullptr; 1.123 + // let other side we terminated the channel 1.124 + mOtherDirection->mShutDown = true; 1.125 + mVideoEngine = nullptr; 1.126 + } else { 1.127 + // We can't delete the VideoEngine until all these are released! 1.128 + // And we can't use a Scoped ptr, since the order is arbitrary 1.129 + mPtrViEBase = nullptr; 1.130 + mPtrViECapture = nullptr; 1.131 + mPtrViECodec = nullptr; 1.132 + mPtrViENetwork = nullptr; 1.133 + mPtrViERender = nullptr; 1.134 + mPtrRTP = nullptr; 1.135 + mPtrExtCodec = nullptr; 1.136 + 1.137 + // only one opener can call Delete. Have it be the last to close. 1.138 + if(mVideoEngine) 1.139 + { 1.140 + webrtc::VideoEngine::Delete(mVideoEngine); 1.141 + } 1.142 + } 1.143 +} 1.144 + 1.145 +bool WebrtcVideoConduit::GetLocalSSRC(unsigned int* ssrc) { 1.146 + return !mPtrRTP->GetLocalSSRC(mChannel, *ssrc); 1.147 +} 1.148 + 1.149 +bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) { 1.150 + return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc); 1.151 +} 1.152 + 1.153 +bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs, 1.154 + int32_t* playoutBufferDelayMs, 1.155 + int32_t* avSyncOffsetMs) { 1.156 + return false; 1.157 +} 1.158 + 1.159 +bool WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs, 1.160 + unsigned int* cumulativeLost) { 1.161 + unsigned short fractionLost; 1.162 + unsigned extendedMax; 1.163 + int rttMs; 1.164 + // GetReceivedRTCPStatistics is a poorly named GetRTPStatistics variant 1.165 + return !mPtrRTP->GetReceivedRTCPStatistics(mChannel, fractionLost, 1.166 + *cumulativeLost, 1.167 + extendedMax, 1.168 + *jitterMs, 1.169 + rttMs); 1.170 +} 1.171 + 1.172 +bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp, 1.173 + uint32_t* jitterMs, 1.174 + uint32_t* packetsReceived, 1.175 + uint64_t* bytesReceived, 1.176 + uint32_t* cumulativeLost, 1.177 + int32_t* rttMs) { 1.178 + uint32_t ntpHigh, ntpLow; 1.179 + uint16_t fractionLost; 1.180 + bool result = !mPtrRTP->GetRemoteRTCPReceiverInfo(mChannel, ntpHigh, ntpLow, 1.181 + *packetsReceived, 1.182 + *bytesReceived, 1.183 + jitterMs, 1.184 + &fractionLost, 1.185 + cumulativeLost, 1.186 + rttMs); 1.187 + if (result) { 1.188 + *timestamp = NTPtoDOMHighResTimeStamp(ntpHigh, ntpLow); 1.189 + } 1.190 + return result; 1.191 +} 1.192 + 1.193 +bool WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp, 1.194 + unsigned int* packetsSent, 1.195 + uint64_t* bytesSent) { 1.196 + struct webrtc::SenderInfo senderInfo; 1.197 + bool result = !mPtrRTP->GetRemoteRTCPSenderInfo(mChannel, &senderInfo); 1.198 + if (result) { 1.199 + *timestamp = NTPtoDOMHighResTimeStamp(senderInfo.NTP_timestamp_high, 1.200 + senderInfo.NTP_timestamp_low); 1.201 + *packetsSent = senderInfo.sender_packet_count; 1.202 + *bytesSent = senderInfo.sender_octet_count; 1.203 + } 1.204 + return result; 1.205 +} 1.206 + 1.207 +/** 1.208 + * Peforms intialization of the MANDATORY components of the Video Engine 1.209 + */ 1.210 +MediaConduitErrorCode WebrtcVideoConduit::Init(WebrtcVideoConduit *other) 1.211 +{ 1.212 + CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other); 1.213 + 1.214 + if (other) { 1.215 + MOZ_ASSERT(!other->mOtherDirection); 1.216 + other->mOtherDirection = this; 1.217 + mOtherDirection = other; 1.218 + 1.219 + // only one can call 
::Create()/GetVideoEngine() 1.220 + MOZ_ASSERT(other->mVideoEngine); 1.221 + mVideoEngine = other->mVideoEngine; 1.222 + } else { 1.223 + 1.224 +#ifdef MOZ_WIDGET_ANDROID 1.225 + jobject context = jsjni_GetGlobalContextRef(); 1.226 + 1.227 + // get the JVM 1.228 + JavaVM *jvm = jsjni_GetVM(); 1.229 + 1.230 + if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) { 1.231 + CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__); 1.232 + return kMediaConduitSessionNotInited; 1.233 + } 1.234 +#endif 1.235 + 1.236 + // Per WebRTC APIs below function calls return nullptr on failure 1.237 + if( !(mVideoEngine = webrtc::VideoEngine::Create()) ) 1.238 + { 1.239 + CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); 1.240 + return kMediaConduitSessionNotInited; 1.241 + } 1.242 + 1.243 + PRLogModuleInfo *logs = GetWebRTCLogInfo(); 1.244 + if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) { 1.245 + // no need to a critical section or lock here 1.246 + gWebrtcTraceLoggingOn = 1; 1.247 + 1.248 + const char *file = PR_GetEnv("WEBRTC_TRACE_FILE"); 1.249 + if (!file) { 1.250 + file = "WebRTC.log"; 1.251 + } 1.252 + CSFLogDebug(logTag, "%s Logging webrtc to %s level %d", __FUNCTION__, 1.253 + file, logs->level); 1.254 + mVideoEngine->SetTraceFilter(logs->level); 1.255 + mVideoEngine->SetTraceFile(file); 1.256 + } 1.257 + } 1.258 + 1.259 + if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine))) 1.260 + { 1.261 + CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__); 1.262 + return kMediaConduitSessionNotInited; 1.263 + } 1.264 + 1.265 + if( !(mPtrViECapture = ViECapture::GetInterface(mVideoEngine))) 1.266 + { 1.267 + CSFLogError(logTag, "%s Unable to get video capture interface", __FUNCTION__); 1.268 + return kMediaConduitSessionNotInited; 1.269 + } 1.270 + 1.271 + if( !(mPtrViECodec = ViECodec::GetInterface(mVideoEngine))) 1.272 + { 1.273 + CSFLogError(logTag, "%s Unable to get video codec interface ", __FUNCTION__); 1.274 + return kMediaConduitSessionNotInited; 1.275 + } 1.276 + 1.277 + if( !(mPtrViENetwork = ViENetwork::GetInterface(mVideoEngine))) 1.278 + { 1.279 + CSFLogError(logTag, "%s Unable to get video network interface ", __FUNCTION__); 1.280 + return kMediaConduitSessionNotInited; 1.281 + } 1.282 + 1.283 + if( !(mPtrViERender = ViERender::GetInterface(mVideoEngine))) 1.284 + { 1.285 + CSFLogError(logTag, "%s Unable to get video render interface ", __FUNCTION__); 1.286 + return kMediaConduitSessionNotInited; 1.287 + } 1.288 + 1.289 + if( !(mPtrRTP = webrtc::ViERTP_RTCP::GetInterface(mVideoEngine))) 1.290 + { 1.291 + CSFLogError(logTag, "%s Unable to get video RTCP interface ", __FUNCTION__); 1.292 + return kMediaConduitSessionNotInited; 1.293 + } 1.294 + 1.295 + if ( !(mPtrExtCodec = webrtc::ViEExternalCodec::GetInterface(mVideoEngine))) 1.296 + { 1.297 + CSFLogError(logTag, "%s Unable to get external codec interface %d ", 1.298 + __FUNCTION__, mPtrViEBase->LastError()); 1.299 + return kMediaConduitSessionNotInited; 1.300 + } 1.301 + 1.302 + if (other) { 1.303 + mChannel = other->mChannel; 1.304 + mPtrExtCapture = other->mPtrExtCapture; 1.305 + mCapId = other->mCapId; 1.306 + } else { 1.307 + CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__); 1.308 + 1.309 + if(mPtrViEBase->Init() == -1) 1.310 + { 1.311 + CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__, 1.312 + mPtrViEBase->LastError()); 1.313 + return kMediaConduitSessionNotInited; 1.314 + } 1.315 + 1.316 + 
if(mPtrViEBase->CreateChannel(mChannel) == -1) 1.317 + { 1.318 + CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__, 1.319 + mPtrViEBase->LastError()); 1.320 + return kMediaConduitChannelError; 1.321 + } 1.322 + 1.323 + if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1) 1.324 + { 1.325 + CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__, 1.326 + mPtrViEBase->LastError()); 1.327 + return kMediaConduitTransportRegistrationFail; 1.328 + } 1.329 + 1.330 + if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId, 1.331 + mPtrExtCapture) == -1) 1.332 + { 1.333 + CSFLogError(logTag, "%s Unable to Allocate capture module: %d ", 1.334 + __FUNCTION__, mPtrViEBase->LastError()); 1.335 + return kMediaConduitCaptureError; 1.336 + } 1.337 + 1.338 + if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1) 1.339 + { 1.340 + CSFLogError(logTag, "%s Unable to Connect capture module: %d ", 1.341 + __FUNCTION__,mPtrViEBase->LastError()); 1.342 + return kMediaConduitCaptureError; 1.343 + } 1.344 + 1.345 + if(mPtrViERender->AddRenderer(mChannel, 1.346 + webrtc::kVideoI420, 1.347 + (webrtc::ExternalRenderer*) this) == -1) 1.348 + { 1.349 + CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__); 1.350 + return kMediaConduitInvalidRenderer; 1.351 + } 1.352 + // Set up some parameters, per juberti. Set MTU. 1.353 + if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0) 1.354 + { 1.355 + CSFLogError(logTag, "%s MTU Failed %d ", __FUNCTION__, 1.356 + mPtrViEBase->LastError()); 1.357 + return kMediaConduitMTUError; 1.358 + } 1.359 + // Turn on RTCP and loss feedback reporting. 1.360 + if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0) 1.361 + { 1.362 + CSFLogError(logTag, "%s RTCPStatus Failed %d ", __FUNCTION__, 1.363 + mPtrViEBase->LastError()); 1.364 + return kMediaConduitRTCPStatusError; 1.365 + } 1.366 + } 1.367 + 1.368 + CSFLogError(logTag, "%s Initialization Done", __FUNCTION__); 1.369 + return kMediaConduitNoError; 1.370 +} 1.371 + 1.372 +void 1.373 +WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit) 1.374 +{ 1.375 + CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit); 1.376 + 1.377 + // SyncTo(value) syncs to the AudioConduit, and if already synced replaces 1.378 + // the current sync target. SyncTo(nullptr) cancels any existing sync and 1.379 + // releases the strong ref to AudioConduit. 1.380 + if (aConduit) { 1.381 + mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine()); 1.382 + mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel()); 1.383 + // NOTE: this means the VideoConduit will keep the AudioConduit alive! 
1.384 + } else if ((mOtherDirection && mOtherDirection->mSyncedTo) || mSyncedTo) { 1.385 + mPtrViEBase->DisconnectAudioChannel(mChannel); 1.386 + mPtrViEBase->SetVoiceEngine(nullptr); 1.387 + } 1.388 + 1.389 + // Now manage the shared sync reference (ugly) 1.390 + if (mSyncedTo || !mOtherDirection ) { 1.391 + mSyncedTo = aConduit; 1.392 + } else { 1.393 + mOtherDirection->mSyncedTo = aConduit; 1.394 + } 1.395 +} 1.396 + 1.397 +MediaConduitErrorCode 1.398 +WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer) 1.399 +{ 1.400 + CSFLogDebug(logTag, "%s ", __FUNCTION__); 1.401 + 1.402 + //null renderer 1.403 + if(!aVideoRenderer) 1.404 + { 1.405 + CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__); 1.406 + MOZ_ASSERT(PR_FALSE); 1.407 + return kMediaConduitInvalidRenderer; 1.408 + } 1.409 + 1.410 + //Start Rendering if we haven't already 1.411 + if(!mRenderer) 1.412 + { 1.413 + mRenderer = aVideoRenderer; // must be done before StartRender() 1.414 + 1.415 + if(mPtrViERender->StartRender(mChannel) == -1) 1.416 + { 1.417 + CSFLogError(logTag, "%s Starting the Renderer Failed %d ", __FUNCTION__, 1.418 + mPtrViEBase->LastError()); 1.419 + mRenderer = nullptr; 1.420 + return kMediaConduitRendererFail; 1.421 + } 1.422 + } else { 1.423 + //Assign the new renderer - overwrites if there is already one 1.424 + mRenderer = aVideoRenderer; 1.425 + } 1.426 + 1.427 + return kMediaConduitNoError; 1.428 +} 1.429 + 1.430 +void 1.431 +WebrtcVideoConduit::DetachRenderer() 1.432 +{ 1.433 + if(mRenderer) 1.434 + { 1.435 + mPtrViERender->StopRender(mChannel); 1.436 + mRenderer = nullptr; 1.437 + } 1.438 +} 1.439 + 1.440 +MediaConduitErrorCode 1.441 +WebrtcVideoConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport) 1.442 +{ 1.443 + CSFLogDebug(logTag, "%s ", __FUNCTION__); 1.444 + if(!aTransport) 1.445 + { 1.446 + CSFLogError(logTag, "%s NULL Transport", __FUNCTION__); 1.447 + return kMediaConduitInvalidTransport; 1.448 + } 1.449 + // set the transport 1.450 + mTransport = aTransport; 1.451 + return kMediaConduitNoError; 1.452 +} 1.453 + 1.454 +/** 1.455 + * Note: Setting the send-codec on the Video Engine will restart the encoder, 1.456 + * sets up new SSRC and reset RTP_RTCP module with the new codec setting. 1.457 + */ 1.458 +MediaConduitErrorCode 1.459 +WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig) 1.460 +{ 1.461 + CSFLogDebug(logTag, "%s ", __FUNCTION__); 1.462 + bool codecFound = false; 1.463 + MediaConduitErrorCode condError = kMediaConduitNoError; 1.464 + int error = 0; //webrtc engine errors 1.465 + webrtc::VideoCodec video_codec; 1.466 + std::string payloadName; 1.467 + 1.468 + //validate basic params 1.469 + if((condError = ValidateCodecConfig(codecConfig,true)) != kMediaConduitNoError) 1.470 + { 1.471 + return condError; 1.472 + } 1.473 + 1.474 + //Check if we have same codec already applied 1.475 + if(CheckCodecsForMatch(mCurSendCodecConfig, codecConfig)) 1.476 + { 1.477 + CSFLogDebug(logTag, "%s Codec has been applied already ", __FUNCTION__); 1.478 + return kMediaConduitCodecInUse; 1.479 + } 1.480 + 1.481 + //transmitting already ? 1.482 + if(mEngineTransmitting) 1.483 + { 1.484 + CSFLogDebug(logTag, "%s Engine Already Sending. 
Attemping to Stop ", __FUNCTION__); 1.485 + if(mPtrViEBase->StopSend(mChannel) == -1) 1.486 + { 1.487 + CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__, 1.488 + mPtrViEBase->LastError()); 1.489 + return kMediaConduitUnknownError; 1.490 + } 1.491 + } 1.492 + 1.493 + mEngineTransmitting = false; 1.494 + 1.495 + if (codecConfig->mLoadManager) { 1.496 + mPtrViEBase->RegisterCpuOveruseObserver(mChannel, codecConfig->mLoadManager); 1.497 + mPtrViEBase->SetLoadManager(codecConfig->mLoadManager); 1.498 + } 1.499 + 1.500 + // we should be good here to set the new codec. 1.501 + for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++) 1.502 + { 1.503 + if(0 == mPtrViECodec->GetCodec(idx, video_codec)) 1.504 + { 1.505 + payloadName = video_codec.plName; 1.506 + if(codecConfig->mName.compare(payloadName) == 0) 1.507 + { 1.508 + CodecConfigToWebRTCCodec(codecConfig, video_codec); 1.509 + codecFound = true; 1.510 + break; 1.511 + } 1.512 + } 1.513 + }//for 1.514 + 1.515 + if(codecFound == false) 1.516 + { 1.517 + CSFLogError(logTag, "%s Codec Mismatch ", __FUNCTION__); 1.518 + return kMediaConduitInvalidSendCodec; 1.519 + } 1.520 + 1.521 + if(mPtrViECodec->SetSendCodec(mChannel, video_codec) == -1) 1.522 + { 1.523 + error = mPtrViEBase->LastError(); 1.524 + if(error == kViECodecInvalidCodec) 1.525 + { 1.526 + CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__); 1.527 + return kMediaConduitInvalidSendCodec; 1.528 + } 1.529 + CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__, 1.530 + mPtrViEBase->LastError()); 1.531 + return kMediaConduitUnknownError; 1.532 + } 1.533 + mSendingWidth = 0; 1.534 + mSendingHeight = 0; 1.535 + 1.536 + if(codecConfig->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC)) { 1.537 + CSFLogDebug(logTag, "Enabling NACK (send) for video stream\n"); 1.538 + if (mPtrRTP->SetNACKStatus(mChannel, true) != 0) 1.539 + { 1.540 + CSFLogError(logTag, "%s NACKStatus Failed %d ", __FUNCTION__, 1.541 + mPtrViEBase->LastError()); 1.542 + return kMediaConduitNACKStatusError; 1.543 + } 1.544 + } 1.545 + 1.546 + if(mPtrViEBase->StartSend(mChannel) == -1) 1.547 + { 1.548 + CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__, 1.549 + mPtrViEBase->LastError()); 1.550 + return kMediaConduitUnknownError; 1.551 + } 1.552 + 1.553 + //Copy the applied config for future reference. 1.554 + delete mCurSendCodecConfig; 1.555 + 1.556 + mCurSendCodecConfig = new VideoCodecConfig(*codecConfig); 1.557 + 1.558 + mPtrRTP->SetRembStatus(mChannel, true, false); 1.559 + 1.560 + // by now we should be successfully started the transmission 1.561 + mEngineTransmitting = true; 1.562 + return kMediaConduitNoError; 1.563 +} 1.564 + 1.565 +MediaConduitErrorCode 1.566 +WebrtcVideoConduit::ConfigureRecvMediaCodecs( 1.567 + const std::vector<VideoCodecConfig* >& codecConfigList) 1.568 +{ 1.569 + CSFLogDebug(logTag, "%s ", __FUNCTION__); 1.570 + MediaConduitErrorCode condError = kMediaConduitNoError; 1.571 + int error = 0; //webrtc engine errors 1.572 + bool success = false; 1.573 + std::string payloadName; 1.574 + 1.575 + // are we receiving already? If so, stop receiving and playout 1.576 + // since we can't apply new recv codec when the engine is playing. 1.577 + if(mEngineReceiving) 1.578 + { 1.579 + CSFLogDebug(logTag, "%s Engine Already Receiving . 
Attempting to Stop ", __FUNCTION__);
1.580 +    if(mPtrViEBase->StopReceive(mChannel) == -1)
1.581 +    {
1.582 +      error = mPtrViEBase->LastError();
1.583 +      if(error == kViEBaseUnknownError)
1.584 +      {
1.585 +        CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__);
1.586 +        mEngineReceiving = false;
1.587 +      } else {
1.588 +        CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__,
1.589 +                    mPtrViEBase->LastError());
1.590 +        return kMediaConduitUnknownError;
1.591 +      }
1.592 +    }
1.593 +  }
1.594 +
1.595 +  mEngineReceiving = false;
1.596 +
1.597 +  if(codecConfigList.empty())
1.598 +  {
1.599 +    CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
1.600 +    return kMediaConduitMalformedArgument;
1.601 +  }
1.602 +
1.603 +  webrtc::ViEKeyFrameRequestMethod kf_request = webrtc::kViEKeyFrameRequestNone;
1.604 +  bool use_nack_basic = false;
1.605 +
1.606 +  //Try applying the codecs in the list
1.607 +  // we treat it as success if at least one codec was applied and reception
1.608 +  // was started successfully.
1.609 +  for(std::vector<VideoCodecConfig*>::size_type i=0;i < codecConfigList.size();i++)
1.610 +  {
1.611 +    //if the codec param is invalid or duplicate, return error
1.612 +    if((condError = ValidateCodecConfig(codecConfigList[i],false)) != kMediaConduitNoError)
1.613 +    {
1.614 +      return condError;
1.615 +    }
1.616 +
1.617 +    // Check for the keyframe request type: PLI is preferred
1.618 +    // over FIR, and FIR is preferred over none.
1.619 +    if (codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_PLI))
1.620 +    {
1.621 +      kf_request = webrtc::kViEKeyFrameRequestPliRtcp;
1.622 +    } else if(kf_request == webrtc::kViEKeyFrameRequestNone &&
1.623 +              codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_CCM_FIR))
1.624 +    {
1.625 +      kf_request = webrtc::kViEKeyFrameRequestFirRtcp;
1.626 +    }
1.627 +
1.628 +    // Check whether NACK is requested
1.629 +    if(codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC))
1.630 +    {
1.631 +      use_nack_basic = true;
1.632 +    }
1.633 +
1.634 +    webrtc::VideoCodec video_codec;
1.635 +
1.636 +    mEngineReceiving = false;
1.637 +    memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
1.638 +    //Retrieve pre-populated codec structure for our codec.
1.639 +    for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
1.640 +    {
1.641 +      if(mPtrViECodec->GetCodec(idx, video_codec) == 0)
1.642 +      {
1.643 +        payloadName = video_codec.plName;
1.644 +        if(codecConfigList[i]->mName.compare(payloadName) == 0)
1.645 +        {
1.646 +          CodecConfigToWebRTCCodec(codecConfigList[i], video_codec);
1.647 +          if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1)
1.648 +          {
1.649 +            CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__,
1.650 +                        mPtrViEBase->LastError());
1.651 +          } else {
1.652 +            CSFLogError(logTag, "%s Successfully Set the codec %s", __FUNCTION__,
1.653 +                        codecConfigList[i]->mName.c_str());
1.654 +            if(CopyCodecToDB(codecConfigList[i]))
1.655 +            {
1.656 +              success = true;
1.657 +            } else {
1.658 +              CSFLogError(logTag,"%s Unable to update Codec Database", __FUNCTION__);
1.659 +              return kMediaConduitUnknownError;
1.660 +            }
1.661 +          }
1.662 +          break; //we found a match
1.663 +        }
1.664 +      }
1.665 +    }//end for codeclist
1.666 +
1.667 +  }//end for
1.668 +
1.669 +  if(!success)
1.670 +  {
1.671 +    CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__);
1.672 +    return kMediaConduitInvalidReceiveCodec;
1.673 +  }
1.674 +
1.675 +  // XXX Currently, we gather up all of the feedback types that the remote
1.676 +  // party indicated it supports for all video codecs and configure the entire
1.677 +  // conduit based on those capabilities. This is technically out of spec,
1.678 +  // as these values should be configured on a per-codec basis. However,
1.679 +  // the video engine only provides this API on a per-conduit basis, so that's
1.680 +  // how we have to do it. The approach of considering the remote capabilities
1.681 +  // for the entire conduit to be a union of all remote codec capabilities
1.682 +  // (rather than the more conservative approach of using an intersection)
1.683 +  // is made to provide as many feedback mechanisms as are likely to be
1.684 +  // processed by the remote party (and should be relatively safe, since the
1.685 +  // remote party is required to ignore feedback types that it does not
1.686 +  // understand).
1.687 +  //
1.688 +  // Note that our configuration uses this union of remote capabilities as
1.689 +  // input to the configuration. It is not isomorphic to the configuration.
1.690 +  // For example, it only makes sense to have one frame request mechanism
1.691 +  // active at a time; so, if the remote party indicates more than one
1.692 +  // supported mechanism, we're only configuring the one we most prefer.
1.693 +  //
1.694 +  // See http://code.google.com/p/webrtc/issues/detail?id=2331
1.695 +
1.696 +  if (kf_request != webrtc::kViEKeyFrameRequestNone)
1.697 +  {
1.698 +    CSFLogDebug(logTag, "Enabling %s frame requests for video stream\n",
1.699 +                (kf_request == webrtc::kViEKeyFrameRequestPliRtcp ?
1.700 + "PLI" : "FIR")); 1.701 + if(mPtrRTP->SetKeyFrameRequestMethod(mChannel, kf_request) != 0) 1.702 + { 1.703 + CSFLogError(logTag, "%s KeyFrameRequest Failed %d ", __FUNCTION__, 1.704 + mPtrViEBase->LastError()); 1.705 + return kMediaConduitKeyFrameRequestError; 1.706 + } 1.707 + } 1.708 + 1.709 + switch (kf_request) { 1.710 + case webrtc::kViEKeyFrameRequestNone: 1.711 + mFrameRequestMethod = FrameRequestNone; 1.712 + break; 1.713 + case webrtc::kViEKeyFrameRequestPliRtcp: 1.714 + mFrameRequestMethod = FrameRequestPli; 1.715 + break; 1.716 + case webrtc::kViEKeyFrameRequestFirRtcp: 1.717 + mFrameRequestMethod = FrameRequestFir; 1.718 + break; 1.719 + default: 1.720 + MOZ_ASSERT(PR_FALSE); 1.721 + mFrameRequestMethod = FrameRequestUnknown; 1.722 + } 1.723 + 1.724 + if(use_nack_basic) 1.725 + { 1.726 + CSFLogDebug(logTag, "Enabling NACK (recv) for video stream\n"); 1.727 + if (mPtrRTP->SetNACKStatus(mChannel, true) != 0) 1.728 + { 1.729 + CSFLogError(logTag, "%s NACKStatus Failed %d ", __FUNCTION__, 1.730 + mPtrViEBase->LastError()); 1.731 + return kMediaConduitNACKStatusError; 1.732 + } 1.733 + } 1.734 + mUsingNackBasic = use_nack_basic; 1.735 + 1.736 + //Start Receive on the video engine 1.737 + if(mPtrViEBase->StartReceive(mChannel) == -1) 1.738 + { 1.739 + error = mPtrViEBase->LastError(); 1.740 + CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error); 1.741 + 1.742 + 1.743 + return kMediaConduitUnknownError; 1.744 + } 1.745 + 1.746 +#ifdef MOZILLA_INTERNAL_API 1.747 + if (NS_IsMainThread()) { 1.748 + nsresult rv; 1.749 + nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv); 1.750 + if (NS_SUCCEEDED(rv)) { 1.751 + nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs); 1.752 + 1.753 + if (branch) { 1.754 + branch->GetBoolPref("media.video.test_latency", &mVideoLatencyTestEnable); 1.755 + } 1.756 + } 1.757 + } 1.758 +#endif 1.759 + 1.760 + // by now we should be successfully started the reception 1.761 + mPtrRTP->SetRembStatus(mChannel, false, true); 1.762 + mEngineReceiving = true; 1.763 + DumpCodecDB(); 1.764 + return kMediaConduitNoError; 1.765 +} 1.766 + 1.767 +// XXX we need to figure out how to feed back changes in preferred capture 1.768 +// resolution to the getUserMedia source 1.769 +bool 1.770 +WebrtcVideoConduit::SelectSendResolution(unsigned short width, 1.771 + unsigned short height) 1.772 +{ 1.773 + // XXX This will do bandwidth-resolution adaptation as well - bug 877954 1.774 + 1.775 + // Limit resolution to max-fs while keeping same aspect ratio as the 1.776 + // incoming image. 1.777 + if (mCurSendCodecConfig && mCurSendCodecConfig->mMaxFrameSize) 1.778 + { 1.779 + unsigned int cur_fs, max_width, max_height, mb_width, mb_height, mb_max; 1.780 + 1.781 + mb_width = (width + 15) >> 4; 1.782 + mb_height = (height + 15) >> 4; 1.783 + 1.784 + cur_fs = mb_width * mb_height; 1.785 + 1.786 + // Limit resolution to max_fs, but don't scale up. 1.787 + if (cur_fs > mCurSendCodecConfig->mMaxFrameSize) 1.788 + { 1.789 + double scale_ratio; 1.790 + 1.791 + scale_ratio = sqrt((double) mCurSendCodecConfig->mMaxFrameSize / 1.792 + (double) cur_fs); 1.793 + 1.794 + mb_width = mb_width * scale_ratio; 1.795 + mb_height = mb_height * scale_ratio; 1.796 + 1.797 + // Adjust mb_width and mb_height if they were truncated to zero. 
1.798 +      if (mb_width == 0) {
1.799 +        mb_width = 1;
1.800 +        mb_height = std::min(mb_height, mCurSendCodecConfig->mMaxFrameSize);
1.801 +      }
1.802 +      if (mb_height == 0) {
1.803 +        mb_height = 1;
1.804 +        mb_width = std::min(mb_width, mCurSendCodecConfig->mMaxFrameSize);
1.805 +      }
1.806 +    }
1.807 +
1.808 +    // Limit width/height separately to limit effect of extreme aspect ratios.
1.809 +    mb_max = (unsigned) sqrt(8 * (double) mCurSendCodecConfig->mMaxFrameSize);
1.810 +
1.811 +    max_width = 16 * std::min(mb_width, mb_max);
1.812 +    max_height = 16 * std::min(mb_height, mb_max);
1.813 +
1.814 +    if (width * max_height > max_width * height)
1.815 +    {
1.816 +      if (width > max_width)
1.817 +      {
1.818 +        // Because the value is truncated to an integer here and forced to an
1.819 +        // even value later, add 1 to improve accuracy.
1.820 +        height = max_width * height / width + 1;
1.821 +        width = max_width;
1.822 +      }
1.823 +    }
1.824 +    else
1.825 +    {
1.826 +      if (height > max_height)
1.827 +      {
1.828 +        // Because the value is truncated to an integer here and forced to an
1.829 +        // even value later, add 1 to improve accuracy.
1.830 +        width = max_height * width / height + 1;
1.831 +        height = max_height;
1.832 +      }
1.833 +    }
1.834 +
1.835 +    // Favor even multiples of pixels for width and height.
1.836 +    width = std::max(width & ~1, 2);
1.837 +    height = std::max(height & ~1, 2);
1.838 +  }
1.839 +
1.840 +  // Adapt to getUserMedia resolution changes
1.841 +  // check if we need to reconfigure the sending resolution
1.842 +  if (mSendingWidth != width || mSendingHeight != height)
1.843 +  {
1.844 +    // This will avoid us continually retrying this operation if it fails.
1.845 +    // If the resolution changes, we'll try again. In the meantime, we'll
1.846 +    // keep using the old size in the encoder.
1.847 +    mSendingWidth = width;
1.848 +    mSendingHeight = height;
1.849 +
1.850 +    // Get current vie codec.
1.851 +    webrtc::VideoCodec vie_codec;
1.852 +    int32_t err;
1.853 +
1.854 +    if ((err = mPtrViECodec->GetSendCodec(mChannel, vie_codec)) != 0)
1.855 +    {
1.856 +      CSFLogError(logTag, "%s: GetSendCodec failed, err %d", __FUNCTION__, err);
1.857 +      return false;
1.858 +    }
1.859 +    if (vie_codec.width != width || vie_codec.height != height)
1.860 +    {
1.861 +      vie_codec.width = width;
1.862 +      vie_codec.height = height;
1.863 +
1.864 +      if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
1.865 +      {
1.866 +        CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
1.867 +                    __FUNCTION__, width, height, err);
1.868 +        return false;
1.869 +      }
1.870 +      CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u",
1.871 +                  __FUNCTION__, width, height);
1.872 +    } // else no change; mSendingWidth likely was 0
1.873 +  }
1.874 +  return true;
1.875 +}
1.876 +
1.877 +MediaConduitErrorCode
1.878 +WebrtcVideoConduit::SetExternalSendCodec(int pltype,
1.879 +                                         VideoEncoder* encoder) {
1.880 +  int ret = mPtrExtCodec->RegisterExternalSendCodec(mChannel,
1.881 +                                                    pltype,
1.882 +                                                    static_cast<WebrtcVideoEncoder*>(encoder),
1.883 +                                                    false);
1.884 +  return ret ? kMediaConduitInvalidSendCodec : kMediaConduitNoError;
1.885 +}
1.886 +
1.887 +MediaConduitErrorCode
1.888 +WebrtcVideoConduit::SetExternalRecvCodec(int pltype,
1.889 +                                         VideoDecoder* decoder) {
1.890 +  int ret = mPtrExtCodec->RegisterExternalReceiveCodec(mChannel,
1.891 +                                                       pltype,
1.892 +                                                       static_cast<WebrtcVideoDecoder*>(decoder));
1.893 +  return ret ? kMediaConduitInvalidReceiveCodec : kMediaConduitNoError;
1.894 +}
1.895 +
1.896 +MediaConduitErrorCode
1.897 +WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
1.898 +                                   unsigned int video_frame_length,
1.899 +                                   unsigned short width,
1.900 +                                   unsigned short height,
1.901 +                                   VideoType video_type,
1.902 +                                   uint64_t capture_time)
1.903 +{
1.904 +  CSFLogDebug(logTag, "%s ", __FUNCTION__);
1.905 +
1.906 +  //check for the parameters sanity
1.907 +  if(!video_frame || video_frame_length == 0 ||
1.908 +     width == 0 || height == 0)
1.909 +  {
1.910 +    CSFLogError(logTag, "%s Invalid Parameters ",__FUNCTION__);
1.911 +    MOZ_ASSERT(PR_FALSE);
1.912 +    return kMediaConduitMalformedArgument;
1.913 +  }
1.914 +
1.915 +  webrtc::RawVideoType type;
1.916 +  switch (video_type) {
1.917 +    case kVideoI420:
1.918 +      type = webrtc::kVideoI420;
1.919 +      break;
1.920 +    case kVideoNV21:
1.921 +      type = webrtc::kVideoNV21;
1.922 +      break;
1.923 +    default:
1.924 +      CSFLogError(logTag, "%s VideoType Invalid. Only I420 and NV21 Supported",__FUNCTION__);
1.925 +      MOZ_ASSERT(PR_FALSE);
1.926 +      return kMediaConduitMalformedArgument;
1.927 +  }
1.928 +  //Transmission should be enabled before we insert any frames.
1.929 +  if(!mEngineTransmitting)
1.930 +  {
1.931 +    CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
1.932 +    return kMediaConduitSessionNotInited;
1.933 +  }
1.934 +
1.935 +  // enforce even width/height (paranoia)
1.936 +  MOZ_ASSERT(!(width & 1));
1.937 +  MOZ_ASSERT(!(height & 1));
1.938 +
1.939 +  if (!SelectSendResolution(width, height))
1.940 +  {
1.941 +    return kMediaConduitCaptureError;
1.942 +  }
1.943 +
1.944 +  //insert the frame to video engine in I420 format only
1.945 +  MOZ_ASSERT(mPtrExtCapture);
1.946 +  if(mPtrExtCapture->IncomingFrame(video_frame,
1.947 +                                   video_frame_length,
1.948 +                                   width, height,
1.949 +                                   type,
1.950 +                                   (unsigned long long)capture_time) == -1)
1.951 +  {
1.952 +    CSFLogError(logTag, "%s IncomingFrame Failed %d ", __FUNCTION__,
1.953 +                mPtrViEBase->LastError());
1.954 +    return kMediaConduitCaptureError;
1.955 +  }
1.956 +
1.957 +  CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
1.958 +  return kMediaConduitNoError;
1.959 +}
1.960 +
1.961 +// Transport Layer Callbacks
1.962 +MediaConduitErrorCode
1.963 +WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
1.964 +{
1.965 +  CSFLogDebug(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len);
1.966 +
1.967 +  // Media Engine should be receiving already.
1.968 + if(mEngineReceiving) 1.969 + { 1.970 + // let the engine know of a RTP packet to decode 1.971 + if(mPtrViENetwork->ReceivedRTPPacket(mChannel,data,len) == -1) 1.972 + { 1.973 + int error = mPtrViEBase->LastError(); 1.974 + CSFLogError(logTag, "%s RTP Processing Failed %d ", __FUNCTION__, error); 1.975 + if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled) 1.976 + { 1.977 + return kMediaConduitRTPProcessingFailed; 1.978 + } 1.979 + return kMediaConduitRTPRTCPModuleError; 1.980 + } 1.981 + } else { 1.982 + CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__); 1.983 + return kMediaConduitSessionNotInited; 1.984 + } 1.985 + 1.986 + return kMediaConduitNoError; 1.987 +} 1.988 + 1.989 +MediaConduitErrorCode 1.990 +WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len) 1.991 +{ 1.992 + CSFLogDebug(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len); 1.993 + 1.994 + //Media Engine should be receiving already 1.995 + if(mEngineTransmitting) 1.996 + { 1.997 + if(mPtrViENetwork->ReceivedRTCPPacket(mChannel,data,len) == -1) 1.998 + { 1.999 + int error = mPtrViEBase->LastError(); 1.1000 + CSFLogError(logTag, "%s RTP Processing Failed %d", __FUNCTION__, error); 1.1001 + if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled) 1.1002 + { 1.1003 + return kMediaConduitRTPProcessingFailed; 1.1004 + } 1.1005 + return kMediaConduitRTPRTCPModuleError; 1.1006 + } 1.1007 + } else { 1.1008 + CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__); 1.1009 + return kMediaConduitSessionNotInited; 1.1010 + } 1.1011 + return kMediaConduitNoError; 1.1012 +} 1.1013 + 1.1014 +//WebRTC::RTP Callback Implementation 1.1015 +int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len) 1.1016 +{ 1.1017 + CSFLogDebug(logTag, "%s : channel %d len %d %s", __FUNCTION__, channel, len, 1.1018 + (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : ""); 1.1019 + 1.1020 + if (mEngineReceiving) 1.1021 + { 1.1022 + if (mOtherDirection) 1.1023 + { 1.1024 + return mOtherDirection->SendPacket(channel, data, len); 1.1025 + } 1.1026 + CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d", 1.1027 + __FUNCTION__, channel); 1.1028 + return -1; 1.1029 + } else { 1.1030 + if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK)) 1.1031 + { 1.1032 + CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__); 1.1033 + return len; 1.1034 + } else { 1.1035 + CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__); 1.1036 + return -1; 1.1037 + } 1.1038 + } 1.1039 +} 1.1040 + 1.1041 +int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len) 1.1042 +{ 1.1043 + CSFLogDebug(logTag, "%s : channel %d , len %d ", __FUNCTION__, channel,len); 1.1044 + 1.1045 + if (mEngineTransmitting) 1.1046 + { 1.1047 + if (mOtherDirection) 1.1048 + { 1.1049 + return mOtherDirection->SendRTCPPacket(channel, data, len); 1.1050 + } 1.1051 + } 1.1052 + 1.1053 + // We come here if we have only one pipeline/conduit setup, 1.1054 + // such as for unidirectional streams. 
1.1055 +  // We also end up here if we are receiving
1.1056 +  if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
1.1057 +  {
1.1058 +    CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
1.1059 +    return len;
1.1060 +  } else {
1.1061 +    CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
1.1062 +    return -1;
1.1063 +  }
1.1064 +}
1.1065 +
1.1066 +// WebRTC::ExternalMedia Implementation
1.1067 +int
1.1068 +WebrtcVideoConduit::FrameSizeChange(unsigned int width,
1.1069 +                                    unsigned int height,
1.1070 +                                    unsigned int numStreams)
1.1071 +{
1.1072 +  CSFLogDebug(logTag, "%s ", __FUNCTION__);
1.1073 +
1.1074 +
1.1075 +  mReceivingWidth = width;
1.1076 +  mReceivingHeight = height;
1.1077 +
1.1078 +  if(mRenderer)
1.1079 +  {
1.1080 +    mRenderer->FrameSizeChange(width, height, numStreams);
1.1081 +    return 0;
1.1082 +  }
1.1083 +
1.1084 +  CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__);
1.1085 +  return -1;
1.1086 +}
1.1087 +
1.1088 +int
1.1089 +WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
1.1090 +                                 int buffer_size,
1.1091 +                                 uint32_t time_stamp,
1.1092 +                                 int64_t render_time,
1.1093 +                                 void *handle)
1.1094 +{
1.1095 +  CSFLogDebug(logTag, "%s Buffer Size %d", __FUNCTION__, buffer_size);
1.1096 +
1.1097 +  if(mRenderer)
1.1098 +  {
1.1099 +    layers::Image* img = nullptr;
1.1100 +    // |handle| should be a webrtc::NativeHandle if available.
1.1101 +    if (handle) {
1.1102 +      webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(handle);
1.1103 +      // In the handle, there should be a layers::Image.
1.1104 +      img = static_cast<layers::Image*>(native_h->GetHandle());
1.1105 +    }
1.1106 +
1.1107 +    if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
1.1108 +      uint64_t now = PR_Now();
1.1109 +      uint64_t timestamp = 0;
1.1110 +      bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
1.1111 +                                   buffer,
1.1112 +                                   reinterpret_cast<unsigned char*>(&timestamp),
1.1113 +                                   sizeof(timestamp), 0, 0);
1.1114 +      if (ok) {
1.1115 +        VideoLatencyUpdate(now - timestamp);
1.1116 +      }
1.1117 +    }
1.1118 +
1.1119 +    const ImageHandle img_h(img);
1.1120 +    mRenderer->RenderVideoFrame(buffer, buffer_size, time_stamp, render_time,
1.1121 +                                img_h);
1.1122 +    return 0;
1.1123 +  }
1.1124 +
1.1125 +  CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__);
1.1126 +  return -1;
1.1127 +}
1.1128 +
1.1129 +/**
1.1130 + * Fill in a webrtc::VideoCodec from the given VideoCodecConfig
1.1131 + */
1.1132 +
1.1133 +void
1.1134 +WebrtcVideoConduit::CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo,
1.1135 +                                             webrtc::VideoCodec& cinst)
1.1136 +{
1.1137 +  cinst.plType = codecInfo->mType;
1.1138 +  // leave width/height alone; they'll be overridden on the first frame
1.1139 +  if (codecInfo->mMaxFrameRate > 0)
1.1140 +  {
1.1141 +    cinst.maxFramerate = codecInfo->mMaxFrameRate;
1.1142 +  }
1.1143 +  cinst.minBitrate = 200;
1.1144 +  cinst.startBitrate = 300;
1.1145 +  cinst.maxBitrate = 2000;
1.1146 +}
1.1147 +
1.1148 +//Copy the codec passed into Conduit's database
1.1149 +bool
1.1150 +WebrtcVideoConduit::CopyCodecToDB(const VideoCodecConfig* codecInfo)
1.1151 +{
1.1152 +  VideoCodecConfig* cdcConfig = new VideoCodecConfig(*codecInfo);
1.1153 +  mRecvCodecList.push_back(cdcConfig);
1.1154 +  return true;
1.1155 +}
1.1156 +
1.1157 +bool
1.1158 +WebrtcVideoConduit::CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig,
1.1159 +                                        const VideoCodecConfig* codecInfo) const
1.1160 +{
1.1161 +  if(!curCodecConfig)
1.1162 +  {
1.1163 +    return false;
1.1164 +  }
1.1165 +
1.1166 +  if(curCodecConfig->mType ==
codecInfo->mType && 1.1167 + curCodecConfig->mName.compare(codecInfo->mName) == 0 && 1.1168 + curCodecConfig->mMaxFrameSize == codecInfo->mMaxFrameSize && 1.1169 + curCodecConfig->mMaxFrameRate == codecInfo->mMaxFrameRate) 1.1170 + { 1.1171 + return true; 1.1172 + } 1.1173 + 1.1174 + return false; 1.1175 +} 1.1176 + 1.1177 +/** 1.1178 + * Checks if the codec is already in Conduit's database 1.1179 + */ 1.1180 +bool 1.1181 +WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const 1.1182 +{ 1.1183 + //the db should have atleast one codec 1.1184 + for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++) 1.1185 + { 1.1186 + if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo)) 1.1187 + { 1.1188 + //match 1.1189 + return true; 1.1190 + } 1.1191 + } 1.1192 + //no match or empty local db 1.1193 + return false; 1.1194 +} 1.1195 + 1.1196 +/** 1.1197 + * Perform validation on the codecConfig to be applied 1.1198 + * Verifies if the codec is already applied. 1.1199 + */ 1.1200 +MediaConduitErrorCode 1.1201 +WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo, 1.1202 + bool send) const 1.1203 +{ 1.1204 + bool codecAppliedAlready = false; 1.1205 + 1.1206 + if(!codecInfo) 1.1207 + { 1.1208 + CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__); 1.1209 + return kMediaConduitMalformedArgument; 1.1210 + } 1.1211 + 1.1212 + if((codecInfo->mName.empty()) || 1.1213 + (codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) 1.1214 + { 1.1215 + CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__); 1.1216 + return kMediaConduitMalformedArgument; 1.1217 + } 1.1218 + 1.1219 + //check if we have the same codec already applied 1.1220 + if(send) 1.1221 + { 1.1222 + codecAppliedAlready = CheckCodecsForMatch(mCurSendCodecConfig,codecInfo); 1.1223 + } else { 1.1224 + codecAppliedAlready = CheckCodecForMatch(codecInfo); 1.1225 + } 1.1226 + 1.1227 + if(codecAppliedAlready) 1.1228 + { 1.1229 + CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str()); 1.1230 + return kMediaConduitCodecInUse; 1.1231 + } 1.1232 + return kMediaConduitNoError; 1.1233 +} 1.1234 + 1.1235 +void 1.1236 +WebrtcVideoConduit::DumpCodecDB() const 1.1237 +{ 1.1238 + for(std::vector<VideoCodecConfig*>::size_type i=0;i<mRecvCodecList.size();i++) 1.1239 + { 1.1240 + CSFLogDebug(logTag,"Payload Name: %s", mRecvCodecList[i]->mName.c_str()); 1.1241 + CSFLogDebug(logTag,"Payload Type: %d", mRecvCodecList[i]->mType); 1.1242 + CSFLogDebug(logTag,"Payload Max Frame Size: %d", mRecvCodecList[i]->mMaxFrameSize); 1.1243 + CSFLogDebug(logTag,"Payload Max Frame Rate: %d", mRecvCodecList[i]->mMaxFrameRate); 1.1244 + } 1.1245 +} 1.1246 + 1.1247 +void 1.1248 +WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample) 1.1249 +{ 1.1250 + mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen; 1.1251 +} 1.1252 + 1.1253 +uint64_t 1.1254 +WebrtcVideoConduit::MozVideoLatencyAvg() 1.1255 +{ 1.1256 + return mVideoLatencyAvg / sRoundingPadding; 1.1257 +} 1.1258 + 1.1259 +}// end namespace
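
For readers of the latency code at the end of the patch: a minimal standalone sketch of the fixed-point smoothing performed by VideoLatencyUpdate() and read back by MozVideoLatencyAvg(). The constants sAlphaNum, sAlphaDen and sRoundingPadding are declared in VideoConduit.h and are not part of this file; the values below are assumed purely for illustration.

// Illustrative only -- not part of the patch above.
#include <cstdint>
#include <cstdio>

static const uint64_t sAlphaNum = 7;          // assumed smoothing numerator
static const uint64_t sAlphaDen = 8;          // assumed smoothing denominator
static const uint64_t sRoundingPadding = 512; // assumed fixed-point scale

int main()
{
  // The running average is kept scaled by sRoundingPadding so the integer
  // division by sAlphaDen does not discard the fractional milliseconds.
  uint64_t videoLatencyAvg = 0;                 // plays the role of mVideoLatencyAvg
  const uint64_t samplesMs[] = { 100, 120, 90, 110 };
  for (uint64_t sample : samplesMs) {
    // Same update step as WebrtcVideoConduit::VideoLatencyUpdate()
    videoLatencyAvg = (sRoundingPadding * sample + sAlphaNum * videoLatencyAvg) / sAlphaDen;
    // Same read-out as WebrtcVideoConduit::MozVideoLatencyAvg()
    printf("smoothed latency: %llu ms\n",
           (unsigned long long)(videoLatencyAvg / sRoundingPadding));
  }
  return 0;
}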