Wed, 31 Dec 2014 06:09:35 +0100
Cloned from upstream origin tor-browser at tag tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
michael@0 | 1 | /* This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 2 | * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
michael@0 | 3 | * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 4 | |
michael@0 | 5 | #include "CSFLog.h" |
michael@0 | 6 | #include "nspr.h" |
michael@0 | 7 | |
michael@0 | 8 | // For rtcp-fb constants |
michael@0 | 9 | #include "ccsdp.h" |
michael@0 | 10 | |
michael@0 | 11 | #include "VideoConduit.h" |
michael@0 | 12 | #include "AudioConduit.h" |
michael@0 | 13 | #include "nsThreadUtils.h" |
michael@0 | 14 | #include "LoadManager.h" |
michael@0 | 15 | #include "YuvStamper.h" |
michael@0 | 16 | #include "nsServiceManagerUtils.h" |
michael@0 | 17 | #include "nsIPrefService.h" |
michael@0 | 18 | #include "nsIPrefBranch.h" |
michael@0 | 19 | |
michael@0 | 20 | #include "webrtc/common_video/interface/native_handle.h" |
michael@0 | 21 | #include "webrtc/video_engine/include/vie_errors.h" |
michael@0 | 22 | |
michael@0 | 23 | #ifdef MOZ_WIDGET_ANDROID |
michael@0 | 24 | #include "AndroidJNIWrapper.h" |
michael@0 | 25 | #endif |
michael@0 | 26 | |
michael@0 | 27 | #include <algorithm> |
michael@0 | 28 | #include <math.h> |
michael@0 | 29 | |
michael@0 | 30 | namespace mozilla { |
michael@0 | 31 | |
michael@0 | 32 | static const char* logTag ="WebrtcVideoSessionConduit"; |
michael@0 | 33 | |
michael@0 | 34 | // 32 bytes is what WebRTC CodecInst expects |
michael@0 | 35 | const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32; |
michael@0 | 36 | |
michael@0 | 37 | /** |
michael@0 | 38 | * Factory Method for VideoConduit |
michael@0 | 39 | */ |
michael@0 | 40 | mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create(VideoSessionConduit *aOther) |
michael@0 | 41 | { |
michael@0 | 42 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 43 | // unit tests create their own "main thread" |
michael@0 | 44 | NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); |
michael@0 | 45 | #endif |
michael@0 | 46 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 47 | |
michael@0 | 48 | WebrtcVideoConduit* obj = new WebrtcVideoConduit(); |
michael@0 | 49 | if(obj->Init(static_cast<WebrtcVideoConduit*>(aOther)) != kMediaConduitNoError) |
michael@0 | 50 | { |
michael@0 | 51 | CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__); |
michael@0 | 52 | delete obj; |
michael@0 | 53 | return nullptr; |
michael@0 | 54 | } |
michael@0 | 55 | CSFLogDebug(logTag, "%s Successfully created VideoConduit ", __FUNCTION__); |
michael@0 | 56 | return obj; |
michael@0 | 57 | } |
michael@0 | 58 | |
// Destructor: tears down capture, render, transport and the channel, then
// releases the engine interfaces.  When two conduits are paired via
// mOtherDirection, the FIRST one destroyed shuts down media for both and
// marks the survivor with mShutDown; the LAST one destroyed deletes the
// shared VideoEngine.
WebrtcVideoConduit::~WebrtcVideoConduit()
{
#ifdef MOZILLA_INTERNAL_API
  // unit tests create their own "main thread"
  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
#endif
  CSFLogDebug(logTag, "%s ", __FUNCTION__);

  // Free the receive-codec configs we own (raw pointers in mRecvCodecList).
  for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
  {
    delete mRecvCodecList[i];
  }

  // Owned copy of the last applied send codec; deleting null is a no-op.
  delete mCurSendCodecConfig;

  // The first one of a pair to be deleted shuts down media for both
  //Deal with External Capturer
  if(mPtrViECapture)
  {
    if (!mShutDown) {
      mPtrViECapture->DisconnectCaptureDevice(mCapId);
      mPtrViECapture->ReleaseCaptureDevice(mCapId);
      // Clear the shared external-capture pointer on both directions.
      mPtrExtCapture = nullptr;
      if (mOtherDirection)
        mOtherDirection->mPtrExtCapture = nullptr;
    }
  }

  //Deal with External Renderer
  if(mPtrViERender)
  {
    if (!mShutDown) {
      if(mRenderer) {
        mPtrViERender->StopRender(mChannel);
      }
      mPtrViERender->RemoveRenderer(mChannel);
    }
  }

  //Deal with the transport
  if(mPtrViENetwork)
  {
    if (!mShutDown) {
      mPtrViENetwork->DeregisterSendTransport(mChannel);
    }
  }

  if(mPtrViEBase)
  {
    if (!mShutDown) {
      mPtrViEBase->StopSend(mChannel);
      mPtrViEBase->StopReceive(mChannel);
      // Drop any audio-conduit sync before the channel is deleted.
      SyncTo(nullptr);
      mPtrViEBase->DeleteChannel(mChannel);
    }
  }

  if (mOtherDirection)
  {
    // mOtherDirection owns these now!
    mOtherDirection->mOtherDirection = nullptr;
    // let the other side know we terminated the channel
    mOtherDirection->mShutDown = true;
    // Keep the engine alive for the surviving direction; it deletes it last.
    mVideoEngine = nullptr;
  } else {
    // We can't delete the VideoEngine until all these are released!
    // And we can't use a Scoped ptr, since the order is arbitrary
    mPtrViEBase = nullptr;
    mPtrViECapture = nullptr;
    mPtrViECodec = nullptr;
    mPtrViENetwork = nullptr;
    mPtrViERender = nullptr;
    mPtrRTP = nullptr;
    mPtrExtCodec = nullptr;

    // only one opener can call Delete. Have it be the last to close.
    if(mVideoEngine)
    {
      webrtc::VideoEngine::Delete(mVideoEngine);
    }
  }
}
michael@0 | 141 | |
michael@0 | 142 | bool WebrtcVideoConduit::GetLocalSSRC(unsigned int* ssrc) { |
michael@0 | 143 | return !mPtrRTP->GetLocalSSRC(mChannel, *ssrc); |
michael@0 | 144 | } |
michael@0 | 145 | |
michael@0 | 146 | bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) { |
michael@0 | 147 | return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc); |
michael@0 | 148 | } |
michael@0 | 149 | |
// A/V sync statistics are not implemented for the video conduit: the output
// parameters are never written and the call always reports failure.
bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
                                    int32_t* playoutBufferDelayMs,
                                    int32_t* avSyncOffsetMs) {
  return false;
}
michael@0 | 155 | |
michael@0 | 156 | bool WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs, |
michael@0 | 157 | unsigned int* cumulativeLost) { |
michael@0 | 158 | unsigned short fractionLost; |
michael@0 | 159 | unsigned extendedMax; |
michael@0 | 160 | int rttMs; |
michael@0 | 161 | // GetReceivedRTCPStatistics is a poorly named GetRTPStatistics variant |
michael@0 | 162 | return !mPtrRTP->GetReceivedRTCPStatistics(mChannel, fractionLost, |
michael@0 | 163 | *cumulativeLost, |
michael@0 | 164 | extendedMax, |
michael@0 | 165 | *jitterMs, |
michael@0 | 166 | rttMs); |
michael@0 | 167 | } |
michael@0 | 168 | |
michael@0 | 169 | bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp, |
michael@0 | 170 | uint32_t* jitterMs, |
michael@0 | 171 | uint32_t* packetsReceived, |
michael@0 | 172 | uint64_t* bytesReceived, |
michael@0 | 173 | uint32_t* cumulativeLost, |
michael@0 | 174 | int32_t* rttMs) { |
michael@0 | 175 | uint32_t ntpHigh, ntpLow; |
michael@0 | 176 | uint16_t fractionLost; |
michael@0 | 177 | bool result = !mPtrRTP->GetRemoteRTCPReceiverInfo(mChannel, ntpHigh, ntpLow, |
michael@0 | 178 | *packetsReceived, |
michael@0 | 179 | *bytesReceived, |
michael@0 | 180 | jitterMs, |
michael@0 | 181 | &fractionLost, |
michael@0 | 182 | cumulativeLost, |
michael@0 | 183 | rttMs); |
michael@0 | 184 | if (result) { |
michael@0 | 185 | *timestamp = NTPtoDOMHighResTimeStamp(ntpHigh, ntpLow); |
michael@0 | 186 | } |
michael@0 | 187 | return result; |
michael@0 | 188 | } |
michael@0 | 189 | |
michael@0 | 190 | bool WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp, |
michael@0 | 191 | unsigned int* packetsSent, |
michael@0 | 192 | uint64_t* bytesSent) { |
michael@0 | 193 | struct webrtc::SenderInfo senderInfo; |
michael@0 | 194 | bool result = !mPtrRTP->GetRemoteRTCPSenderInfo(mChannel, &senderInfo); |
michael@0 | 195 | if (result) { |
michael@0 | 196 | *timestamp = NTPtoDOMHighResTimeStamp(senderInfo.NTP_timestamp_high, |
michael@0 | 197 | senderInfo.NTP_timestamp_low); |
michael@0 | 198 | *packetsSent = senderInfo.sender_packet_count; |
michael@0 | 199 | *bytesSent = senderInfo.sender_octet_count; |
michael@0 | 200 | } |
michael@0 | 201 | return result; |
michael@0 | 202 | } |
michael@0 | 203 | |
michael@0 | 204 | /** |
michael@0 | 205 | * Peforms intialization of the MANDATORY components of the Video Engine |
michael@0 | 206 | */ |
michael@0 | 207 | MediaConduitErrorCode WebrtcVideoConduit::Init(WebrtcVideoConduit *other) |
michael@0 | 208 | { |
michael@0 | 209 | CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other); |
michael@0 | 210 | |
michael@0 | 211 | if (other) { |
michael@0 | 212 | MOZ_ASSERT(!other->mOtherDirection); |
michael@0 | 213 | other->mOtherDirection = this; |
michael@0 | 214 | mOtherDirection = other; |
michael@0 | 215 | |
michael@0 | 216 | // only one can call ::Create()/GetVideoEngine() |
michael@0 | 217 | MOZ_ASSERT(other->mVideoEngine); |
michael@0 | 218 | mVideoEngine = other->mVideoEngine; |
michael@0 | 219 | } else { |
michael@0 | 220 | |
michael@0 | 221 | #ifdef MOZ_WIDGET_ANDROID |
michael@0 | 222 | jobject context = jsjni_GetGlobalContextRef(); |
michael@0 | 223 | |
michael@0 | 224 | // get the JVM |
michael@0 | 225 | JavaVM *jvm = jsjni_GetVM(); |
michael@0 | 226 | |
michael@0 | 227 | if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) { |
michael@0 | 228 | CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__); |
michael@0 | 229 | return kMediaConduitSessionNotInited; |
michael@0 | 230 | } |
michael@0 | 231 | #endif |
michael@0 | 232 | |
michael@0 | 233 | // Per WebRTC APIs below function calls return nullptr on failure |
michael@0 | 234 | if( !(mVideoEngine = webrtc::VideoEngine::Create()) ) |
michael@0 | 235 | { |
michael@0 | 236 | CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); |
michael@0 | 237 | return kMediaConduitSessionNotInited; |
michael@0 | 238 | } |
michael@0 | 239 | |
michael@0 | 240 | PRLogModuleInfo *logs = GetWebRTCLogInfo(); |
michael@0 | 241 | if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) { |
michael@0 | 242 | // no need to a critical section or lock here |
michael@0 | 243 | gWebrtcTraceLoggingOn = 1; |
michael@0 | 244 | |
michael@0 | 245 | const char *file = PR_GetEnv("WEBRTC_TRACE_FILE"); |
michael@0 | 246 | if (!file) { |
michael@0 | 247 | file = "WebRTC.log"; |
michael@0 | 248 | } |
michael@0 | 249 | CSFLogDebug(logTag, "%s Logging webrtc to %s level %d", __FUNCTION__, |
michael@0 | 250 | file, logs->level); |
michael@0 | 251 | mVideoEngine->SetTraceFilter(logs->level); |
michael@0 | 252 | mVideoEngine->SetTraceFile(file); |
michael@0 | 253 | } |
michael@0 | 254 | } |
michael@0 | 255 | |
michael@0 | 256 | if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine))) |
michael@0 | 257 | { |
michael@0 | 258 | CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__); |
michael@0 | 259 | return kMediaConduitSessionNotInited; |
michael@0 | 260 | } |
michael@0 | 261 | |
michael@0 | 262 | if( !(mPtrViECapture = ViECapture::GetInterface(mVideoEngine))) |
michael@0 | 263 | { |
michael@0 | 264 | CSFLogError(logTag, "%s Unable to get video capture interface", __FUNCTION__); |
michael@0 | 265 | return kMediaConduitSessionNotInited; |
michael@0 | 266 | } |
michael@0 | 267 | |
michael@0 | 268 | if( !(mPtrViECodec = ViECodec::GetInterface(mVideoEngine))) |
michael@0 | 269 | { |
michael@0 | 270 | CSFLogError(logTag, "%s Unable to get video codec interface ", __FUNCTION__); |
michael@0 | 271 | return kMediaConduitSessionNotInited; |
michael@0 | 272 | } |
michael@0 | 273 | |
michael@0 | 274 | if( !(mPtrViENetwork = ViENetwork::GetInterface(mVideoEngine))) |
michael@0 | 275 | { |
michael@0 | 276 | CSFLogError(logTag, "%s Unable to get video network interface ", __FUNCTION__); |
michael@0 | 277 | return kMediaConduitSessionNotInited; |
michael@0 | 278 | } |
michael@0 | 279 | |
michael@0 | 280 | if( !(mPtrViERender = ViERender::GetInterface(mVideoEngine))) |
michael@0 | 281 | { |
michael@0 | 282 | CSFLogError(logTag, "%s Unable to get video render interface ", __FUNCTION__); |
michael@0 | 283 | return kMediaConduitSessionNotInited; |
michael@0 | 284 | } |
michael@0 | 285 | |
michael@0 | 286 | if( !(mPtrRTP = webrtc::ViERTP_RTCP::GetInterface(mVideoEngine))) |
michael@0 | 287 | { |
michael@0 | 288 | CSFLogError(logTag, "%s Unable to get video RTCP interface ", __FUNCTION__); |
michael@0 | 289 | return kMediaConduitSessionNotInited; |
michael@0 | 290 | } |
michael@0 | 291 | |
michael@0 | 292 | if ( !(mPtrExtCodec = webrtc::ViEExternalCodec::GetInterface(mVideoEngine))) |
michael@0 | 293 | { |
michael@0 | 294 | CSFLogError(logTag, "%s Unable to get external codec interface %d ", |
michael@0 | 295 | __FUNCTION__, mPtrViEBase->LastError()); |
michael@0 | 296 | return kMediaConduitSessionNotInited; |
michael@0 | 297 | } |
michael@0 | 298 | |
michael@0 | 299 | if (other) { |
michael@0 | 300 | mChannel = other->mChannel; |
michael@0 | 301 | mPtrExtCapture = other->mPtrExtCapture; |
michael@0 | 302 | mCapId = other->mCapId; |
michael@0 | 303 | } else { |
michael@0 | 304 | CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__); |
michael@0 | 305 | |
michael@0 | 306 | if(mPtrViEBase->Init() == -1) |
michael@0 | 307 | { |
michael@0 | 308 | CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__, |
michael@0 | 309 | mPtrViEBase->LastError()); |
michael@0 | 310 | return kMediaConduitSessionNotInited; |
michael@0 | 311 | } |
michael@0 | 312 | |
michael@0 | 313 | if(mPtrViEBase->CreateChannel(mChannel) == -1) |
michael@0 | 314 | { |
michael@0 | 315 | CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__, |
michael@0 | 316 | mPtrViEBase->LastError()); |
michael@0 | 317 | return kMediaConduitChannelError; |
michael@0 | 318 | } |
michael@0 | 319 | |
michael@0 | 320 | if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1) |
michael@0 | 321 | { |
michael@0 | 322 | CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__, |
michael@0 | 323 | mPtrViEBase->LastError()); |
michael@0 | 324 | return kMediaConduitTransportRegistrationFail; |
michael@0 | 325 | } |
michael@0 | 326 | |
michael@0 | 327 | if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId, |
michael@0 | 328 | mPtrExtCapture) == -1) |
michael@0 | 329 | { |
michael@0 | 330 | CSFLogError(logTag, "%s Unable to Allocate capture module: %d ", |
michael@0 | 331 | __FUNCTION__, mPtrViEBase->LastError()); |
michael@0 | 332 | return kMediaConduitCaptureError; |
michael@0 | 333 | } |
michael@0 | 334 | |
michael@0 | 335 | if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1) |
michael@0 | 336 | { |
michael@0 | 337 | CSFLogError(logTag, "%s Unable to Connect capture module: %d ", |
michael@0 | 338 | __FUNCTION__,mPtrViEBase->LastError()); |
michael@0 | 339 | return kMediaConduitCaptureError; |
michael@0 | 340 | } |
michael@0 | 341 | |
michael@0 | 342 | if(mPtrViERender->AddRenderer(mChannel, |
michael@0 | 343 | webrtc::kVideoI420, |
michael@0 | 344 | (webrtc::ExternalRenderer*) this) == -1) |
michael@0 | 345 | { |
michael@0 | 346 | CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__); |
michael@0 | 347 | return kMediaConduitInvalidRenderer; |
michael@0 | 348 | } |
michael@0 | 349 | // Set up some parameters, per juberti. Set MTU. |
michael@0 | 350 | if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0) |
michael@0 | 351 | { |
michael@0 | 352 | CSFLogError(logTag, "%s MTU Failed %d ", __FUNCTION__, |
michael@0 | 353 | mPtrViEBase->LastError()); |
michael@0 | 354 | return kMediaConduitMTUError; |
michael@0 | 355 | } |
michael@0 | 356 | // Turn on RTCP and loss feedback reporting. |
michael@0 | 357 | if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0) |
michael@0 | 358 | { |
michael@0 | 359 | CSFLogError(logTag, "%s RTCPStatus Failed %d ", __FUNCTION__, |
michael@0 | 360 | mPtrViEBase->LastError()); |
michael@0 | 361 | return kMediaConduitRTCPStatusError; |
michael@0 | 362 | } |
michael@0 | 363 | } |
michael@0 | 364 | |
michael@0 | 365 | CSFLogError(logTag, "%s Initialization Done", __FUNCTION__); |
michael@0 | 366 | return kMediaConduitNoError; |
michael@0 | 367 | } |
michael@0 | 368 | |
// Associates (or dissociates) this video conduit with an audio conduit so
// the engine can lip-sync the two channels.
void
WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
{
  CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);

  // SyncTo(value) syncs to the AudioConduit, and if already synced replaces
  // the current sync target.  SyncTo(nullptr) cancels any existing sync and
  // releases the strong ref to AudioConduit.
  if (aConduit) {
    mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
    mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
    // NOTE: this means the VideoConduit will keep the AudioConduit alive!
  } else if ((mOtherDirection && mOtherDirection->mSyncedTo) || mSyncedTo) {
    // Only disconnect when this conduit pair was actually synced before.
    mPtrViEBase->DisconnectAudioChannel(mChannel);
    mPtrViEBase->SetVoiceEngine(nullptr);
  }

  // Now manage the shared sync reference (ugly)
  // The strong ref lives on whichever side of the pair holds mSyncedTo (or
  // on this side when unpaired); update that side only.
  if (mSyncedTo || !mOtherDirection ) {
    mSyncedTo = aConduit;
  } else {
    mOtherDirection->mSyncedTo = aConduit;
  }
}
michael@0 | 393 | |
michael@0 | 394 | MediaConduitErrorCode |
michael@0 | 395 | WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer) |
michael@0 | 396 | { |
michael@0 | 397 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 398 | |
michael@0 | 399 | //null renderer |
michael@0 | 400 | if(!aVideoRenderer) |
michael@0 | 401 | { |
michael@0 | 402 | CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__); |
michael@0 | 403 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 404 | return kMediaConduitInvalidRenderer; |
michael@0 | 405 | } |
michael@0 | 406 | |
michael@0 | 407 | //Start Rendering if we haven't already |
michael@0 | 408 | if(!mRenderer) |
michael@0 | 409 | { |
michael@0 | 410 | mRenderer = aVideoRenderer; // must be done before StartRender() |
michael@0 | 411 | |
michael@0 | 412 | if(mPtrViERender->StartRender(mChannel) == -1) |
michael@0 | 413 | { |
michael@0 | 414 | CSFLogError(logTag, "%s Starting the Renderer Failed %d ", __FUNCTION__, |
michael@0 | 415 | mPtrViEBase->LastError()); |
michael@0 | 416 | mRenderer = nullptr; |
michael@0 | 417 | return kMediaConduitRendererFail; |
michael@0 | 418 | } |
michael@0 | 419 | } else { |
michael@0 | 420 | //Assign the new renderer - overwrites if there is already one |
michael@0 | 421 | mRenderer = aVideoRenderer; |
michael@0 | 422 | } |
michael@0 | 423 | |
michael@0 | 424 | return kMediaConduitNoError; |
michael@0 | 425 | } |
michael@0 | 426 | |
michael@0 | 427 | void |
michael@0 | 428 | WebrtcVideoConduit::DetachRenderer() |
michael@0 | 429 | { |
michael@0 | 430 | if(mRenderer) |
michael@0 | 431 | { |
michael@0 | 432 | mPtrViERender->StopRender(mChannel); |
michael@0 | 433 | mRenderer = nullptr; |
michael@0 | 434 | } |
michael@0 | 435 | } |
michael@0 | 436 | |
michael@0 | 437 | MediaConduitErrorCode |
michael@0 | 438 | WebrtcVideoConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport) |
michael@0 | 439 | { |
michael@0 | 440 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 441 | if(!aTransport) |
michael@0 | 442 | { |
michael@0 | 443 | CSFLogError(logTag, "%s NULL Transport", __FUNCTION__); |
michael@0 | 444 | return kMediaConduitInvalidTransport; |
michael@0 | 445 | } |
michael@0 | 446 | // set the transport |
michael@0 | 447 | mTransport = aTransport; |
michael@0 | 448 | return kMediaConduitNoError; |
michael@0 | 449 | } |
michael@0 | 450 | |
/**
 * Note: Setting the send-codec on the Video Engine will restart the encoder,
 * sets up new SSRC and reset RTP_RTCP module with the new codec setting.
 *
 * Flow: validate config -> skip if identical to the current one -> stop an
 * in-progress send -> look up the engine's prepopulated codec entry by
 * payload name -> apply it -> enable NACK/REMB as requested -> restart
 * sending.  NOTE: an error after StopSend() leaves the conduit stopped.
 */
MediaConduitErrorCode
WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
{
  CSFLogDebug(logTag, "%s ", __FUNCTION__);
  bool codecFound = false;
  MediaConduitErrorCode condError = kMediaConduitNoError;
  int error = 0; //webrtc engine errors
  webrtc::VideoCodec video_codec;
  std::string payloadName;

  //validate basic params
  if((condError = ValidateCodecConfig(codecConfig,true)) != kMediaConduitNoError)
  {
    return condError;
  }

  //Check if we have same codec already applied
  if(CheckCodecsForMatch(mCurSendCodecConfig, codecConfig))
  {
    CSFLogDebug(logTag, "%s Codec has been applied already ", __FUNCTION__);
    return kMediaConduitCodecInUse;
  }

  //transmitting already ?
  // The encoder can't be reconfigured while sending, so stop first.
  if(mEngineTransmitting)
  {
    CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
    if(mPtrViEBase->StopSend(mChannel) == -1)
    {
      CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__,
                  mPtrViEBase->LastError());
      return kMediaConduitUnknownError;
    }
  }

  mEngineTransmitting = false;

  // Hook up CPU-overuse / load management if the config provides one.
  if (codecConfig->mLoadManager) {
    mPtrViEBase->RegisterCpuOveruseObserver(mChannel, codecConfig->mLoadManager);
    mPtrViEBase->SetLoadManager(codecConfig->mLoadManager);
  }

  // we should be good here to set the new codec.
  // Find the engine's template entry for this payload name, then overlay
  // our config onto it.
  for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
  {
    if(0 == mPtrViECodec->GetCodec(idx, video_codec))
    {
      payloadName = video_codec.plName;
      if(codecConfig->mName.compare(payloadName) == 0)
      {
        CodecConfigToWebRTCCodec(codecConfig, video_codec);
        codecFound = true;
        break;
      }
    }
  }//for

  if(codecFound == false)
  {
    CSFLogError(logTag, "%s Codec Mismatch ", __FUNCTION__);
    return kMediaConduitInvalidSendCodec;
  }

  if(mPtrViECodec->SetSendCodec(mChannel, video_codec) == -1)
  {
    error = mPtrViEBase->LastError();
    if(error == kViECodecInvalidCodec)
    {
      CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__);
      return kMediaConduitInvalidSendCodec;
    }
    CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__,
                mPtrViEBase->LastError());
    return kMediaConduitUnknownError;
  }
  // Reset the tracked frame size so the next SendVideoFrame re-evaluates it.
  mSendingWidth = 0;
  mSendingHeight = 0;

  if(codecConfig->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC)) {
    CSFLogDebug(logTag, "Enabling NACK (send) for video stream\n");
    if (mPtrRTP->SetNACKStatus(mChannel, true) != 0)
    {
      CSFLogError(logTag, "%s NACKStatus Failed %d ", __FUNCTION__,
                  mPtrViEBase->LastError());
      return kMediaConduitNACKStatusError;
    }
  }

  if(mPtrViEBase->StartSend(mChannel) == -1)
  {
    CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__,
                mPtrViEBase->LastError());
    return kMediaConduitUnknownError;
  }

  //Copy the applied config for future reference.
  delete mCurSendCodecConfig;

  mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);

  // Enable REMB (receiver-estimated max bitrate) on the send side only.
  mPtrRTP->SetRembStatus(mChannel, true, false);

  // by now we should be successfully started the transmission
  mEngineTransmitting = true;
  return kMediaConduitNoError;
}
michael@0 | 561 | |
michael@0 | 562 | MediaConduitErrorCode |
michael@0 | 563 | WebrtcVideoConduit::ConfigureRecvMediaCodecs( |
michael@0 | 564 | const std::vector<VideoCodecConfig* >& codecConfigList) |
michael@0 | 565 | { |
michael@0 | 566 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 567 | MediaConduitErrorCode condError = kMediaConduitNoError; |
michael@0 | 568 | int error = 0; //webrtc engine errors |
michael@0 | 569 | bool success = false; |
michael@0 | 570 | std::string payloadName; |
michael@0 | 571 | |
michael@0 | 572 | // are we receiving already? If so, stop receiving and playout |
michael@0 | 573 | // since we can't apply new recv codec when the engine is playing. |
michael@0 | 574 | if(mEngineReceiving) |
michael@0 | 575 | { |
michael@0 | 576 | CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__); |
michael@0 | 577 | if(mPtrViEBase->StopReceive(mChannel) == -1) |
michael@0 | 578 | { |
michael@0 | 579 | error = mPtrViEBase->LastError(); |
michael@0 | 580 | if(error == kViEBaseUnknownError) |
michael@0 | 581 | { |
michael@0 | 582 | CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__); |
michael@0 | 583 | mEngineReceiving = false; |
michael@0 | 584 | } else { |
michael@0 | 585 | CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__, |
michael@0 | 586 | mPtrViEBase->LastError()); |
michael@0 | 587 | return kMediaConduitUnknownError; |
michael@0 | 588 | } |
michael@0 | 589 | } |
michael@0 | 590 | } |
michael@0 | 591 | |
michael@0 | 592 | mEngineReceiving = false; |
michael@0 | 593 | |
michael@0 | 594 | if(codecConfigList.empty()) |
michael@0 | 595 | { |
michael@0 | 596 | CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__); |
michael@0 | 597 | return kMediaConduitMalformedArgument; |
michael@0 | 598 | } |
michael@0 | 599 | |
michael@0 | 600 | webrtc::ViEKeyFrameRequestMethod kf_request = webrtc::kViEKeyFrameRequestNone; |
michael@0 | 601 | bool use_nack_basic = false; |
michael@0 | 602 | |
michael@0 | 603 | //Try Applying the codecs in the list |
michael@0 | 604 | // we treat as success if atleast one codec was applied and reception was |
michael@0 | 605 | // started successfully. |
michael@0 | 606 | for(std::vector<VideoCodecConfig*>::size_type i=0;i < codecConfigList.size();i++) |
michael@0 | 607 | { |
michael@0 | 608 | //if the codec param is invalid or diplicate, return error |
michael@0 | 609 | if((condError = ValidateCodecConfig(codecConfigList[i],false)) != kMediaConduitNoError) |
michael@0 | 610 | { |
michael@0 | 611 | return condError; |
michael@0 | 612 | } |
michael@0 | 613 | |
michael@0 | 614 | // Check for the keyframe request type: PLI is preferred |
michael@0 | 615 | // over FIR, and FIR is preferred over none. |
michael@0 | 616 | if (codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_PLI)) |
michael@0 | 617 | { |
michael@0 | 618 | kf_request = webrtc::kViEKeyFrameRequestPliRtcp; |
michael@0 | 619 | } else if(kf_request == webrtc::kViEKeyFrameRequestNone && |
michael@0 | 620 | codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_CCM_FIR)) |
michael@0 | 621 | { |
michael@0 | 622 | kf_request = webrtc::kViEKeyFrameRequestFirRtcp; |
michael@0 | 623 | } |
michael@0 | 624 | |
michael@0 | 625 | // Check whether NACK is requested |
michael@0 | 626 | if(codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC)) |
michael@0 | 627 | { |
michael@0 | 628 | use_nack_basic = true; |
michael@0 | 629 | } |
michael@0 | 630 | |
michael@0 | 631 | webrtc::VideoCodec video_codec; |
michael@0 | 632 | |
michael@0 | 633 | mEngineReceiving = false; |
michael@0 | 634 | memset(&video_codec, 0, sizeof(webrtc::VideoCodec)); |
michael@0 | 635 | //Retrieve pre-populated codec structure for our codec. |
michael@0 | 636 | for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++) |
michael@0 | 637 | { |
michael@0 | 638 | if(mPtrViECodec->GetCodec(idx, video_codec) == 0) |
michael@0 | 639 | { |
michael@0 | 640 | payloadName = video_codec.plName; |
michael@0 | 641 | if(codecConfigList[i]->mName.compare(payloadName) == 0) |
michael@0 | 642 | { |
michael@0 | 643 | CodecConfigToWebRTCCodec(codecConfigList[i], video_codec); |
michael@0 | 644 | if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1) |
michael@0 | 645 | { |
michael@0 | 646 | CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__, |
michael@0 | 647 | mPtrViEBase->LastError()); |
michael@0 | 648 | } else { |
michael@0 | 649 | CSFLogError(logTag, "%s Successfully Set the codec %s", __FUNCTION__, |
michael@0 | 650 | codecConfigList[i]->mName.c_str()); |
michael@0 | 651 | if(CopyCodecToDB(codecConfigList[i])) |
michael@0 | 652 | { |
michael@0 | 653 | success = true; |
michael@0 | 654 | } else { |
michael@0 | 655 | CSFLogError(logTag,"%s Unable to updated Codec Database", __FUNCTION__); |
michael@0 | 656 | return kMediaConduitUnknownError; |
michael@0 | 657 | } |
michael@0 | 658 | } |
michael@0 | 659 | break; //we found a match |
michael@0 | 660 | } |
michael@0 | 661 | } |
michael@0 | 662 | }//end for codeclist |
michael@0 | 663 | |
michael@0 | 664 | }//end for |
michael@0 | 665 | |
michael@0 | 666 | if(!success) |
michael@0 | 667 | { |
michael@0 | 668 | CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__); |
michael@0 | 669 | return kMediaConduitInvalidReceiveCodec; |
michael@0 | 670 | } |
michael@0 | 671 | |
michael@0 | 672 | // XXX Currently, we gather up all of the feedback types that the remote |
michael@0 | 673 | // party indicated it supports for all video codecs and configure the entire |
michael@0 | 674 | // conduit based on those capabilities. This is technically out of spec, |
michael@0 | 675 | // as these values should be configured on a per-codec basis. However, |
michael@0 | 676 | // the video engine only provides this API on a per-conduit basis, so that's |
michael@0 | 677 | // how we have to do it. The approach of considering the remote capablities |
michael@0 | 678 | // for the entire conduit to be a union of all remote codec capabilities |
michael@0 | 679 | // (rather than the more conservative approach of using an intersection) |
michael@0 | 680 | // is made to provide as many feedback mechanisms as are likely to be |
michael@0 | 681 | // processed by the remote party (and should be relatively safe, since the |
michael@0 | 682 | // remote party is required to ignore feedback types that it does not |
michael@0 | 683 | // understand). |
michael@0 | 684 | // |
  // Note that our configuration uses this union of remote capabilities as
michael@0 | 686 | // input to the configuration. It is not isomorphic to the configuration. |
michael@0 | 687 | // For example, it only makes sense to have one frame request mechanism |
michael@0 | 688 | // active at a time; so, if the remote party indicates more than one |
michael@0 | 689 | // supported mechanism, we're only configuring the one we most prefer. |
michael@0 | 690 | // |
michael@0 | 691 | // See http://code.google.com/p/webrtc/issues/detail?id=2331 |
michael@0 | 692 | |
michael@0 | 693 | if (kf_request != webrtc::kViEKeyFrameRequestNone) |
michael@0 | 694 | { |
michael@0 | 695 | CSFLogDebug(logTag, "Enabling %s frame requests for video stream\n", |
michael@0 | 696 | (kf_request == webrtc::kViEKeyFrameRequestPliRtcp ? |
michael@0 | 697 | "PLI" : "FIR")); |
michael@0 | 698 | if(mPtrRTP->SetKeyFrameRequestMethod(mChannel, kf_request) != 0) |
michael@0 | 699 | { |
michael@0 | 700 | CSFLogError(logTag, "%s KeyFrameRequest Failed %d ", __FUNCTION__, |
michael@0 | 701 | mPtrViEBase->LastError()); |
michael@0 | 702 | return kMediaConduitKeyFrameRequestError; |
michael@0 | 703 | } |
michael@0 | 704 | } |
michael@0 | 705 | |
michael@0 | 706 | switch (kf_request) { |
michael@0 | 707 | case webrtc::kViEKeyFrameRequestNone: |
michael@0 | 708 | mFrameRequestMethod = FrameRequestNone; |
michael@0 | 709 | break; |
michael@0 | 710 | case webrtc::kViEKeyFrameRequestPliRtcp: |
michael@0 | 711 | mFrameRequestMethod = FrameRequestPli; |
michael@0 | 712 | break; |
michael@0 | 713 | case webrtc::kViEKeyFrameRequestFirRtcp: |
michael@0 | 714 | mFrameRequestMethod = FrameRequestFir; |
michael@0 | 715 | break; |
michael@0 | 716 | default: |
michael@0 | 717 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 718 | mFrameRequestMethod = FrameRequestUnknown; |
michael@0 | 719 | } |
michael@0 | 720 | |
michael@0 | 721 | if(use_nack_basic) |
michael@0 | 722 | { |
michael@0 | 723 | CSFLogDebug(logTag, "Enabling NACK (recv) for video stream\n"); |
michael@0 | 724 | if (mPtrRTP->SetNACKStatus(mChannel, true) != 0) |
michael@0 | 725 | { |
michael@0 | 726 | CSFLogError(logTag, "%s NACKStatus Failed %d ", __FUNCTION__, |
michael@0 | 727 | mPtrViEBase->LastError()); |
michael@0 | 728 | return kMediaConduitNACKStatusError; |
michael@0 | 729 | } |
michael@0 | 730 | } |
michael@0 | 731 | mUsingNackBasic = use_nack_basic; |
michael@0 | 732 | |
michael@0 | 733 | //Start Receive on the video engine |
michael@0 | 734 | if(mPtrViEBase->StartReceive(mChannel) == -1) |
michael@0 | 735 | { |
michael@0 | 736 | error = mPtrViEBase->LastError(); |
michael@0 | 737 | CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error); |
michael@0 | 738 | |
michael@0 | 739 | |
michael@0 | 740 | return kMediaConduitUnknownError; |
michael@0 | 741 | } |
michael@0 | 742 | |
michael@0 | 743 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 744 | if (NS_IsMainThread()) { |
michael@0 | 745 | nsresult rv; |
michael@0 | 746 | nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv); |
michael@0 | 747 | if (NS_SUCCEEDED(rv)) { |
michael@0 | 748 | nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs); |
michael@0 | 749 | |
michael@0 | 750 | if (branch) { |
michael@0 | 751 | branch->GetBoolPref("media.video.test_latency", &mVideoLatencyTestEnable); |
michael@0 | 752 | } |
michael@0 | 753 | } |
michael@0 | 754 | } |
michael@0 | 755 | #endif |
michael@0 | 756 | |
  // by now we should have successfully started the reception
michael@0 | 758 | mPtrRTP->SetRembStatus(mChannel, false, true); |
michael@0 | 759 | mEngineReceiving = true; |
michael@0 | 760 | DumpCodecDB(); |
michael@0 | 761 | return kMediaConduitNoError; |
michael@0 | 762 | } |
michael@0 | 763 | |
// XXX we need to figure out how to feed back changes in preferred capture
// resolution to the getUserMedia source
/**
 * Clamps the outgoing send resolution to the negotiated max-fs constraint
 * (if any), preserving the incoming aspect ratio, and reconfigures the
 * encoder when the resulting size differs from the currently configured one.
 *
 * @param width/height requested capture resolution in pixels
 * @return false only if querying or reconfiguring the send codec failed;
 *         true otherwise (including the no-change case).
 */
bool
WebrtcVideoConduit::SelectSendResolution(unsigned short width,
                                         unsigned short height)
{
  // XXX This will do bandwidth-resolution adaptation as well - bug 877954

  // Limit resolution to max-fs while keeping same aspect ratio as the
  // incoming image.
  if (mCurSendCodecConfig && mCurSendCodecConfig->mMaxFrameSize)
  {
    unsigned int cur_fs, max_width, max_height, mb_width, mb_height, mb_max;

    // Convert pixel dimensions to 16x16 macroblock counts, rounding up,
    // since max-fs is expressed in macroblocks.
    mb_width = (width + 15) >> 4;
    mb_height = (height + 15) >> 4;

    cur_fs = mb_width * mb_height;

    // Limit resolution to max_fs, but don't scale up.
    if (cur_fs > mCurSendCodecConfig->mMaxFrameSize)
    {
      double scale_ratio;

      // Same factor on both axes keeps the aspect ratio; sqrt because the
      // constraint is on the area (width * height).
      scale_ratio = sqrt((double) mCurSendCodecConfig->mMaxFrameSize /
                         (double) cur_fs);

      // Truncating double-to-unsigned conversion; zero is corrected below.
      mb_width = mb_width * scale_ratio;
      mb_height = mb_height * scale_ratio;

      // Adjust mb_width and mb_height if they were truncated to zero.
      if (mb_width == 0) {
        mb_width = 1;
        mb_height = std::min(mb_height, mCurSendCodecConfig->mMaxFrameSize);
      }
      if (mb_height == 0) {
        mb_height = 1;
        mb_width = std::min(mb_width, mCurSendCodecConfig->mMaxFrameSize);
      }
    }

    // Limit width/height seperately to limit effect of extreme aspect ratios.
    mb_max = (unsigned) sqrt(8 * (double) mCurSendCodecConfig->mMaxFrameSize);

    max_width = 16 * std::min(mb_width, mb_max);
    max_height = 16 * std::min(mb_height, mb_max);

    // Aspect ratios compared via cross-multiplication to avoid division:
    // width/height > max_width/max_height  <=>  width*max_height > max_width*height
    if (width * max_height > max_width * height)
    {
      if (width > max_width)
      {
        // Due to the value is truncated to integer here and forced to even
        // value later, adding 1 to improve accuracy.
        height = max_width * height / width + 1;
        width = max_width;
      }
    }
    else
    {
      if (height > max_height)
      {
        // Due to the value is truncated to integer here and forced to even
        // value later, adding 1 to improve accuracy.
        width = max_height * width / height + 1;
        height = max_height;
      }
    }

    // Favor even multiples of pixels for width and height.
    width = std::max(width & ~1, 2);
    height = std::max(height & ~1, 2);
  }

  // Adapt to getUserMedia resolution changes
  // check if we need to reconfigure the sending resolution
  if (mSendingWidth != width || mSendingHeight != height)
  {
    // This will avoid us continually retrying this operation if it fails.
    // If the resolution changes, we'll try again. In the meantime, we'll
    // keep using the old size in the encoder.
    mSendingWidth = width;
    mSendingHeight = height;

    // Get current vie codec.
    webrtc::VideoCodec vie_codec;
    int32_t err;

    if ((err = mPtrViECodec->GetSendCodec(mChannel, vie_codec)) != 0)
    {
      CSFLogError(logTag, "%s: GetSendCodec failed, err %d", __FUNCTION__, err);
      return false;
    }
    if (vie_codec.width != width || vie_codec.height != height)
    {
      // Only width/height change; all other codec settings are preserved.
      vie_codec.width = width;
      vie_codec.height = height;

      if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
      {
        CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
                    __FUNCTION__, width, height, err);
        return false;
      }
      CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u",
                  __FUNCTION__, width, height);
    } // else no change; mSendingWidth likely was 0
  }
  return true;
}
michael@0 | 873 | |
michael@0 | 874 | MediaConduitErrorCode |
michael@0 | 875 | WebrtcVideoConduit::SetExternalSendCodec(int pltype, |
michael@0 | 876 | VideoEncoder* encoder) { |
michael@0 | 877 | int ret = mPtrExtCodec->RegisterExternalSendCodec(mChannel, |
michael@0 | 878 | pltype, |
michael@0 | 879 | static_cast<WebrtcVideoEncoder*>(encoder), |
michael@0 | 880 | false); |
michael@0 | 881 | return ret ? kMediaConduitInvalidSendCodec : kMediaConduitNoError; |
michael@0 | 882 | } |
michael@0 | 883 | |
michael@0 | 884 | MediaConduitErrorCode |
michael@0 | 885 | WebrtcVideoConduit::SetExternalRecvCodec(int pltype, |
michael@0 | 886 | VideoDecoder* decoder) { |
michael@0 | 887 | int ret = mPtrExtCodec->RegisterExternalReceiveCodec(mChannel, |
michael@0 | 888 | pltype, |
michael@0 | 889 | static_cast<WebrtcVideoDecoder*>(decoder)); |
michael@0 | 890 | return ret ? kMediaConduitInvalidReceiveCodec : kMediaConduitNoError; |
michael@0 | 891 | } |
michael@0 | 892 | |
michael@0 | 893 | MediaConduitErrorCode |
michael@0 | 894 | WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame, |
michael@0 | 895 | unsigned int video_frame_length, |
michael@0 | 896 | unsigned short width, |
michael@0 | 897 | unsigned short height, |
michael@0 | 898 | VideoType video_type, |
michael@0 | 899 | uint64_t capture_time) |
michael@0 | 900 | { |
michael@0 | 901 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 902 | |
michael@0 | 903 | //check for the parameters sanity |
michael@0 | 904 | if(!video_frame || video_frame_length == 0 || |
michael@0 | 905 | width == 0 || height == 0) |
michael@0 | 906 | { |
michael@0 | 907 | CSFLogError(logTag, "%s Invalid Parameters ",__FUNCTION__); |
michael@0 | 908 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 909 | return kMediaConduitMalformedArgument; |
michael@0 | 910 | } |
michael@0 | 911 | |
michael@0 | 912 | webrtc::RawVideoType type; |
michael@0 | 913 | switch (video_type) { |
michael@0 | 914 | case kVideoI420: |
michael@0 | 915 | type = webrtc::kVideoI420; |
michael@0 | 916 | break; |
michael@0 | 917 | case kVideoNV21: |
michael@0 | 918 | type = webrtc::kVideoNV21; |
michael@0 | 919 | break; |
michael@0 | 920 | default: |
michael@0 | 921 | CSFLogError(logTag, "%s VideoType Invalid. Only 1420 and NV21 Supported",__FUNCTION__); |
michael@0 | 922 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 923 | return kMediaConduitMalformedArgument; |
michael@0 | 924 | } |
michael@0 | 925 | //Transmission should be enabled before we insert any frames. |
michael@0 | 926 | if(!mEngineTransmitting) |
michael@0 | 927 | { |
michael@0 | 928 | CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__); |
michael@0 | 929 | return kMediaConduitSessionNotInited; |
michael@0 | 930 | } |
michael@0 | 931 | |
michael@0 | 932 | // enforce even width/height (paranoia) |
michael@0 | 933 | MOZ_ASSERT(!(width & 1)); |
michael@0 | 934 | MOZ_ASSERT(!(height & 1)); |
michael@0 | 935 | |
michael@0 | 936 | if (!SelectSendResolution(width, height)) |
michael@0 | 937 | { |
michael@0 | 938 | return kMediaConduitCaptureError; |
michael@0 | 939 | } |
michael@0 | 940 | |
michael@0 | 941 | //insert the frame to video engine in I420 format only |
michael@0 | 942 | MOZ_ASSERT(mPtrExtCapture); |
michael@0 | 943 | if(mPtrExtCapture->IncomingFrame(video_frame, |
michael@0 | 944 | video_frame_length, |
michael@0 | 945 | width, height, |
michael@0 | 946 | type, |
michael@0 | 947 | (unsigned long long)capture_time) == -1) |
michael@0 | 948 | { |
michael@0 | 949 | CSFLogError(logTag, "%s IncomingFrame Failed %d ", __FUNCTION__, |
michael@0 | 950 | mPtrViEBase->LastError()); |
michael@0 | 951 | return kMediaConduitCaptureError; |
michael@0 | 952 | } |
michael@0 | 953 | |
michael@0 | 954 | CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__); |
michael@0 | 955 | return kMediaConduitNoError; |
michael@0 | 956 | } |
michael@0 | 957 | |
michael@0 | 958 | // Transport Layer Callbacks |
michael@0 | 959 | MediaConduitErrorCode |
michael@0 | 960 | WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len) |
michael@0 | 961 | { |
michael@0 | 962 | CSFLogDebug(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len); |
michael@0 | 963 | |
michael@0 | 964 | // Media Engine should be receiving already. |
michael@0 | 965 | if(mEngineReceiving) |
michael@0 | 966 | { |
michael@0 | 967 | // let the engine know of a RTP packet to decode |
michael@0 | 968 | if(mPtrViENetwork->ReceivedRTPPacket(mChannel,data,len) == -1) |
michael@0 | 969 | { |
michael@0 | 970 | int error = mPtrViEBase->LastError(); |
michael@0 | 971 | CSFLogError(logTag, "%s RTP Processing Failed %d ", __FUNCTION__, error); |
michael@0 | 972 | if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled) |
michael@0 | 973 | { |
michael@0 | 974 | return kMediaConduitRTPProcessingFailed; |
michael@0 | 975 | } |
michael@0 | 976 | return kMediaConduitRTPRTCPModuleError; |
michael@0 | 977 | } |
michael@0 | 978 | } else { |
michael@0 | 979 | CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__); |
michael@0 | 980 | return kMediaConduitSessionNotInited; |
michael@0 | 981 | } |
michael@0 | 982 | |
michael@0 | 983 | return kMediaConduitNoError; |
michael@0 | 984 | } |
michael@0 | 985 | |
michael@0 | 986 | MediaConduitErrorCode |
michael@0 | 987 | WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len) |
michael@0 | 988 | { |
michael@0 | 989 | CSFLogDebug(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len); |
michael@0 | 990 | |
michael@0 | 991 | //Media Engine should be receiving already |
michael@0 | 992 | if(mEngineTransmitting) |
michael@0 | 993 | { |
michael@0 | 994 | if(mPtrViENetwork->ReceivedRTCPPacket(mChannel,data,len) == -1) |
michael@0 | 995 | { |
michael@0 | 996 | int error = mPtrViEBase->LastError(); |
michael@0 | 997 | CSFLogError(logTag, "%s RTP Processing Failed %d", __FUNCTION__, error); |
michael@0 | 998 | if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled) |
michael@0 | 999 | { |
michael@0 | 1000 | return kMediaConduitRTPProcessingFailed; |
michael@0 | 1001 | } |
michael@0 | 1002 | return kMediaConduitRTPRTCPModuleError; |
michael@0 | 1003 | } |
michael@0 | 1004 | } else { |
michael@0 | 1005 | CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__); |
michael@0 | 1006 | return kMediaConduitSessionNotInited; |
michael@0 | 1007 | } |
michael@0 | 1008 | return kMediaConduitNoError; |
michael@0 | 1009 | } |
michael@0 | 1010 | |
michael@0 | 1011 | //WebRTC::RTP Callback Implementation |
michael@0 | 1012 | int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len) |
michael@0 | 1013 | { |
michael@0 | 1014 | CSFLogDebug(logTag, "%s : channel %d len %d %s", __FUNCTION__, channel, len, |
michael@0 | 1015 | (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : ""); |
michael@0 | 1016 | |
michael@0 | 1017 | if (mEngineReceiving) |
michael@0 | 1018 | { |
michael@0 | 1019 | if (mOtherDirection) |
michael@0 | 1020 | { |
michael@0 | 1021 | return mOtherDirection->SendPacket(channel, data, len); |
michael@0 | 1022 | } |
michael@0 | 1023 | CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d", |
michael@0 | 1024 | __FUNCTION__, channel); |
michael@0 | 1025 | return -1; |
michael@0 | 1026 | } else { |
michael@0 | 1027 | if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK)) |
michael@0 | 1028 | { |
michael@0 | 1029 | CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__); |
michael@0 | 1030 | return len; |
michael@0 | 1031 | } else { |
michael@0 | 1032 | CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__); |
michael@0 | 1033 | return -1; |
michael@0 | 1034 | } |
michael@0 | 1035 | } |
michael@0 | 1036 | } |
michael@0 | 1037 | |
michael@0 | 1038 | int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len) |
michael@0 | 1039 | { |
michael@0 | 1040 | CSFLogDebug(logTag, "%s : channel %d , len %d ", __FUNCTION__, channel,len); |
michael@0 | 1041 | |
michael@0 | 1042 | if (mEngineTransmitting) |
michael@0 | 1043 | { |
michael@0 | 1044 | if (mOtherDirection) |
michael@0 | 1045 | { |
michael@0 | 1046 | return mOtherDirection->SendRTCPPacket(channel, data, len); |
michael@0 | 1047 | } |
michael@0 | 1048 | } |
michael@0 | 1049 | |
michael@0 | 1050 | // We come here if we have only one pipeline/conduit setup, |
michael@0 | 1051 | // such as for unidirectional streams. |
michael@0 | 1052 | // We also end up here if we are receiving |
michael@0 | 1053 | if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK) |
michael@0 | 1054 | { |
michael@0 | 1055 | CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__); |
michael@0 | 1056 | return len; |
michael@0 | 1057 | } else { |
michael@0 | 1058 | CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__); |
michael@0 | 1059 | return -1; |
michael@0 | 1060 | } |
michael@0 | 1061 | } |
michael@0 | 1062 | |
michael@0 | 1063 | // WebRTC::ExternalMedia Implementation |
michael@0 | 1064 | int |
michael@0 | 1065 | WebrtcVideoConduit::FrameSizeChange(unsigned int width, |
michael@0 | 1066 | unsigned int height, |
michael@0 | 1067 | unsigned int numStreams) |
michael@0 | 1068 | { |
michael@0 | 1069 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 1070 | |
michael@0 | 1071 | |
michael@0 | 1072 | mReceivingWidth = width; |
michael@0 | 1073 | mReceivingHeight = height; |
michael@0 | 1074 | |
michael@0 | 1075 | if(mRenderer) |
michael@0 | 1076 | { |
michael@0 | 1077 | mRenderer->FrameSizeChange(width, height, numStreams); |
michael@0 | 1078 | return 0; |
michael@0 | 1079 | } |
michael@0 | 1080 | |
michael@0 | 1081 | CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__); |
michael@0 | 1082 | return -1; |
michael@0 | 1083 | } |
michael@0 | 1084 | |
michael@0 | 1085 | int |
michael@0 | 1086 | WebrtcVideoConduit::DeliverFrame(unsigned char* buffer, |
michael@0 | 1087 | int buffer_size, |
michael@0 | 1088 | uint32_t time_stamp, |
michael@0 | 1089 | int64_t render_time, |
michael@0 | 1090 | void *handle) |
michael@0 | 1091 | { |
michael@0 | 1092 | CSFLogDebug(logTag, "%s Buffer Size %d", __FUNCTION__, buffer_size); |
michael@0 | 1093 | |
michael@0 | 1094 | if(mRenderer) |
michael@0 | 1095 | { |
michael@0 | 1096 | layers::Image* img = nullptr; |
michael@0 | 1097 | // |handle| should be a webrtc::NativeHandle if available. |
michael@0 | 1098 | if (handle) { |
michael@0 | 1099 | webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(handle); |
michael@0 | 1100 | // In the handle, there should be a layers::Image. |
michael@0 | 1101 | img = static_cast<layers::Image*>(native_h->GetHandle()); |
michael@0 | 1102 | } |
michael@0 | 1103 | |
michael@0 | 1104 | if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) { |
michael@0 | 1105 | uint64_t now = PR_Now(); |
michael@0 | 1106 | uint64_t timestamp = 0; |
michael@0 | 1107 | bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth, |
michael@0 | 1108 | buffer, |
michael@0 | 1109 | reinterpret_cast<unsigned char*>(×tamp), |
michael@0 | 1110 | sizeof(timestamp), 0, 0); |
michael@0 | 1111 | if (ok) { |
michael@0 | 1112 | VideoLatencyUpdate(now - timestamp); |
michael@0 | 1113 | } |
michael@0 | 1114 | } |
michael@0 | 1115 | |
michael@0 | 1116 | const ImageHandle img_h(img); |
michael@0 | 1117 | mRenderer->RenderVideoFrame(buffer, buffer_size, time_stamp, render_time, |
michael@0 | 1118 | img_h); |
michael@0 | 1119 | return 0; |
michael@0 | 1120 | } |
michael@0 | 1121 | |
michael@0 | 1122 | CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__); |
michael@0 | 1123 | return -1; |
michael@0 | 1124 | } |
michael@0 | 1125 | |
/**
 * Translate a VideoCodecConfig into the video engine's webrtc::VideoCodec
 * representation.
 */

michael@0 | 1130 | void |
michael@0 | 1131 | WebrtcVideoConduit::CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo, |
michael@0 | 1132 | webrtc::VideoCodec& cinst) |
michael@0 | 1133 | { |
michael@0 | 1134 | cinst.plType = codecInfo->mType; |
michael@0 | 1135 | // leave width/height alone; they'll be overridden on the first frame |
michael@0 | 1136 | if (codecInfo->mMaxFrameRate > 0) |
michael@0 | 1137 | { |
michael@0 | 1138 | cinst.maxFramerate = codecInfo->mMaxFrameRate; |
michael@0 | 1139 | } |
michael@0 | 1140 | cinst.minBitrate = 200; |
michael@0 | 1141 | cinst.startBitrate = 300; |
michael@0 | 1142 | cinst.maxBitrate = 2000; |
michael@0 | 1143 | } |
michael@0 | 1144 | |
michael@0 | 1145 | //Copy the codec passed into Conduit's database |
michael@0 | 1146 | bool |
michael@0 | 1147 | WebrtcVideoConduit::CopyCodecToDB(const VideoCodecConfig* codecInfo) |
michael@0 | 1148 | { |
michael@0 | 1149 | VideoCodecConfig* cdcConfig = new VideoCodecConfig(*codecInfo); |
michael@0 | 1150 | mRecvCodecList.push_back(cdcConfig); |
michael@0 | 1151 | return true; |
michael@0 | 1152 | } |
michael@0 | 1153 | |
michael@0 | 1154 | bool |
michael@0 | 1155 | WebrtcVideoConduit::CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig, |
michael@0 | 1156 | const VideoCodecConfig* codecInfo) const |
michael@0 | 1157 | { |
michael@0 | 1158 | if(!curCodecConfig) |
michael@0 | 1159 | { |
michael@0 | 1160 | return false; |
michael@0 | 1161 | } |
michael@0 | 1162 | |
michael@0 | 1163 | if(curCodecConfig->mType == codecInfo->mType && |
michael@0 | 1164 | curCodecConfig->mName.compare(codecInfo->mName) == 0 && |
michael@0 | 1165 | curCodecConfig->mMaxFrameSize == codecInfo->mMaxFrameSize && |
michael@0 | 1166 | curCodecConfig->mMaxFrameRate == codecInfo->mMaxFrameRate) |
michael@0 | 1167 | { |
michael@0 | 1168 | return true; |
michael@0 | 1169 | } |
michael@0 | 1170 | |
michael@0 | 1171 | return false; |
michael@0 | 1172 | } |
michael@0 | 1173 | |
michael@0 | 1174 | /** |
michael@0 | 1175 | * Checks if the codec is already in Conduit's database |
michael@0 | 1176 | */ |
michael@0 | 1177 | bool |
michael@0 | 1178 | WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const |
michael@0 | 1179 | { |
michael@0 | 1180 | //the db should have atleast one codec |
michael@0 | 1181 | for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++) |
michael@0 | 1182 | { |
michael@0 | 1183 | if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo)) |
michael@0 | 1184 | { |
michael@0 | 1185 | //match |
michael@0 | 1186 | return true; |
michael@0 | 1187 | } |
michael@0 | 1188 | } |
michael@0 | 1189 | //no match or empty local db |
michael@0 | 1190 | return false; |
michael@0 | 1191 | } |
michael@0 | 1192 | |
michael@0 | 1193 | /** |
michael@0 | 1194 | * Perform validation on the codecConfig to be applied |
michael@0 | 1195 | * Verifies if the codec is already applied. |
michael@0 | 1196 | */ |
michael@0 | 1197 | MediaConduitErrorCode |
michael@0 | 1198 | WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo, |
michael@0 | 1199 | bool send) const |
michael@0 | 1200 | { |
michael@0 | 1201 | bool codecAppliedAlready = false; |
michael@0 | 1202 | |
michael@0 | 1203 | if(!codecInfo) |
michael@0 | 1204 | { |
michael@0 | 1205 | CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__); |
michael@0 | 1206 | return kMediaConduitMalformedArgument; |
michael@0 | 1207 | } |
michael@0 | 1208 | |
michael@0 | 1209 | if((codecInfo->mName.empty()) || |
michael@0 | 1210 | (codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) |
michael@0 | 1211 | { |
michael@0 | 1212 | CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__); |
michael@0 | 1213 | return kMediaConduitMalformedArgument; |
michael@0 | 1214 | } |
michael@0 | 1215 | |
michael@0 | 1216 | //check if we have the same codec already applied |
michael@0 | 1217 | if(send) |
michael@0 | 1218 | { |
michael@0 | 1219 | codecAppliedAlready = CheckCodecsForMatch(mCurSendCodecConfig,codecInfo); |
michael@0 | 1220 | } else { |
michael@0 | 1221 | codecAppliedAlready = CheckCodecForMatch(codecInfo); |
michael@0 | 1222 | } |
michael@0 | 1223 | |
michael@0 | 1224 | if(codecAppliedAlready) |
michael@0 | 1225 | { |
michael@0 | 1226 | CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str()); |
michael@0 | 1227 | return kMediaConduitCodecInUse; |
michael@0 | 1228 | } |
michael@0 | 1229 | return kMediaConduitNoError; |
michael@0 | 1230 | } |
michael@0 | 1231 | |
michael@0 | 1232 | void |
michael@0 | 1233 | WebrtcVideoConduit::DumpCodecDB() const |
michael@0 | 1234 | { |
michael@0 | 1235 | for(std::vector<VideoCodecConfig*>::size_type i=0;i<mRecvCodecList.size();i++) |
michael@0 | 1236 | { |
michael@0 | 1237 | CSFLogDebug(logTag,"Payload Name: %s", mRecvCodecList[i]->mName.c_str()); |
michael@0 | 1238 | CSFLogDebug(logTag,"Payload Type: %d", mRecvCodecList[i]->mType); |
michael@0 | 1239 | CSFLogDebug(logTag,"Payload Max Frame Size: %d", mRecvCodecList[i]->mMaxFrameSize); |
michael@0 | 1240 | CSFLogDebug(logTag,"Payload Max Frame Rate: %d", mRecvCodecList[i]->mMaxFrameRate); |
michael@0 | 1241 | } |
michael@0 | 1242 | } |
michael@0 | 1243 | |
// Fold a new latency sample into the running average.  newSample is computed
// in DeliverFrame() as PR_Now() minus the capture time stamped into the frame.
// The average is kept as a fixed-point exponentially-weighted moving average,
// scaled up by sRoundingPadding (the scale is removed in MozVideoLatencyAvg);
// sAlphaNum/sAlphaDen set the smoothing weight.  Constants are declared
// elsewhere in the class — presumably sAlphaDen == sAlphaNum + sRoundingPadding
// so the weights sum to 1; confirm against their definitions.
void
WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
{
  mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
}
michael@0 | 1249 | |
// Returns the current video-latency moving average with the sRoundingPadding
// fixed-point scale factor (applied in VideoLatencyUpdate) divided back out.
uint64_t
WebrtcVideoConduit::MozVideoLatencyAvg()
{
  return mVideoLatencyAvg / sRoundingPadding;
}
michael@0 | 1255 | |
michael@0 | 1256 | }// end namespace |