Thu, 15 Jan 2015 15:59:08 +0100
Implement a real Private Browsing Mode condition by changing the API/ABI.
This resolves Tor bug #9701, complying with the disk-avoidance requirements
documented at https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
michael@0 | 1 | /* This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 2 | * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
michael@0 | 3 | * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 4 | |
michael@0 | 5 | #include "CSFLog.h" |
michael@0 | 6 | #include "nspr.h" |
michael@0 | 7 | |
michael@0 | 8 | #ifdef HAVE_NETINET_IN_H |
michael@0 | 9 | #include <netinet/in.h> |
michael@0 | 10 | #elif defined XP_WIN |
michael@0 | 11 | #include <winsock2.h> |
michael@0 | 12 | #endif |
michael@0 | 13 | |
michael@0 | 14 | #include "AudioConduit.h" |
michael@0 | 15 | #include "nsCOMPtr.h" |
michael@0 | 16 | #include "mozilla/Services.h" |
michael@0 | 17 | #include "nsServiceManagerUtils.h" |
michael@0 | 18 | #include "nsIPrefService.h" |
michael@0 | 19 | #include "nsIPrefBranch.h" |
michael@0 | 20 | #include "nsThreadUtils.h" |
michael@0 | 21 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 22 | #include "Latency.h" |
michael@0 | 23 | #include "mozilla/Telemetry.h" |
michael@0 | 24 | #endif |
michael@0 | 25 | |
michael@0 | 26 | #include "webrtc/voice_engine/include/voe_errors.h" |
michael@0 | 27 | #include "webrtc/system_wrappers/interface/clock.h" |
michael@0 | 28 | |
michael@0 | 29 | #ifdef MOZ_WIDGET_ANDROID |
michael@0 | 30 | #include "AndroidJNIWrapper.h" |
michael@0 | 31 | #endif |
michael@0 | 32 | |
michael@0 | 33 | namespace mozilla { |
michael@0 | 34 | |
// Log-module tag used by every CSFLog* call in this file.
static const char* logTag ="WebrtcAudioSessionConduit";

// 32 bytes is what WebRTC CodecInst expects
const unsigned int WebrtcAudioConduit::CODEC_PLNAME_SIZE = 32;
michael@0 | 39 | |
michael@0 | 40 | /** |
michael@0 | 41 | * Factory Method for AudioConduit |
michael@0 | 42 | */ |
michael@0 | 43 | mozilla::RefPtr<AudioSessionConduit> AudioSessionConduit::Create(AudioSessionConduit *aOther) |
michael@0 | 44 | { |
michael@0 | 45 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 46 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 47 | // unit tests create their own "main thread" |
michael@0 | 48 | NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); |
michael@0 | 49 | #endif |
michael@0 | 50 | |
michael@0 | 51 | WebrtcAudioConduit* obj = new WebrtcAudioConduit(); |
michael@0 | 52 | if(obj->Init(static_cast<WebrtcAudioConduit*>(aOther)) != kMediaConduitNoError) |
michael@0 | 53 | { |
michael@0 | 54 | CSFLogError(logTag, "%s AudioConduit Init Failed ", __FUNCTION__); |
michael@0 | 55 | delete obj; |
michael@0 | 56 | return nullptr; |
michael@0 | 57 | } |
michael@0 | 58 | CSFLogDebug(logTag, "%s Successfully created AudioConduit ", __FUNCTION__); |
michael@0 | 59 | return obj; |
michael@0 | 60 | } |
michael@0 | 61 | |
michael@0 | 62 | /** |
michael@0 | 63 | * Destruction defines for our super-classes |
michael@0 | 64 | */ |
/**
 * Tear down one direction of an audio session.
 *
 * A send conduit and a receive conduit may share one VoiceEngine channel
 * (see Init(other)).  Whichever member of the pair is destroyed first stops
 * media, deregisters the transport, and deletes the shared channel, then
 * marks the survivor with mShutDown so it skips that work.  The conduit
 * that originally created the engine (mOtherDirection == nullptr) is the
 * one that releases the interface pointers and deletes the engine.
 */
WebrtcAudioConduit::~WebrtcAudioConduit()
{
#ifdef MOZILLA_INTERNAL_API
  // unit tests create their own "main thread"
  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
#endif

  CSFLogDebug(logTag, "%s ", __FUNCTION__);
  // Free the codec configs this conduit owns (heap copies made when codecs
  // were applied).
  for(std::vector<AudioCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
  {
    delete mRecvCodecList[i];
  }
  delete mCurSendCodecConfig;

  // The first one of a pair to be deleted shuts down media for both
  if(mPtrVoEXmedia)
  {
    if (!mShutDown) {
      mPtrVoEXmedia->SetExternalRecordingStatus(false);
      mPtrVoEXmedia->SetExternalPlayoutStatus(false);
    }
  }

  //Deal with the transport
  if(mPtrVoENetwork)
  {
    if (!mShutDown) {
      mPtrVoENetwork->DeRegisterExternalTransport(mChannel);
    }
  }

  if(mPtrVoEBase)
  {
    if (!mShutDown) {
      // Stop all media flows before deleting the channel and terminating
      // the engine.
      mPtrVoEBase->StopPlayout(mChannel);
      mPtrVoEBase->StopSend(mChannel);
      mPtrVoEBase->StopReceive(mChannel);
      mPtrVoEBase->DeleteChannel(mChannel);
      mPtrVoEBase->Terminate();
    }
  }

  if (mOtherDirection)
  {
    // mOtherDirection owns these now!
    mOtherDirection->mOtherDirection = nullptr;
    // let other side we terminated the channel
    mOtherDirection->mShutDown = true;
    mVoiceEngine = nullptr;
  } else {
    // We shouldn't delete the VoiceEngine until all these are released!
    // And we can't use a Scoped ptr, since the order is arbitrary
    mPtrVoENetwork = nullptr;
    mPtrVoEBase = nullptr;
    mPtrVoECodec = nullptr;
    mPtrVoEXmedia = nullptr;
    mPtrVoEProcessing = nullptr;
    mPtrVoEVideoSync = nullptr;
    mPtrVoERTP_RTCP = nullptr;
    mPtrRTP = nullptr;

    // only one opener can call Delete. Have it be the last to close.
    if(mVoiceEngine)
    {
      webrtc::VoiceEngine::Delete(mVoiceEngine);
    }
  }
}
michael@0 | 133 | |
michael@0 | 134 | bool WebrtcAudioConduit::GetLocalSSRC(unsigned int* ssrc) { |
michael@0 | 135 | return !mPtrRTP->GetLocalSSRC(mChannel, *ssrc); |
michael@0 | 136 | } |
michael@0 | 137 | |
michael@0 | 138 | bool WebrtcAudioConduit::GetRemoteSSRC(unsigned int* ssrc) { |
michael@0 | 139 | return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc); |
michael@0 | 140 | } |
michael@0 | 141 | |
michael@0 | 142 | bool WebrtcAudioConduit::GetAVStats(int32_t* jitterBufferDelayMs, |
michael@0 | 143 | int32_t* playoutBufferDelayMs, |
michael@0 | 144 | int32_t* avSyncOffsetMs) { |
michael@0 | 145 | return !mPtrVoEVideoSync->GetDelayEstimate(mChannel, |
michael@0 | 146 | jitterBufferDelayMs, |
michael@0 | 147 | playoutBufferDelayMs, |
michael@0 | 148 | avSyncOffsetMs); |
michael@0 | 149 | } |
michael@0 | 150 | |
michael@0 | 151 | bool WebrtcAudioConduit::GetRTPStats(unsigned int* jitterMs, |
michael@0 | 152 | unsigned int* cumulativeLost) { |
michael@0 | 153 | unsigned int maxJitterMs = 0; |
michael@0 | 154 | unsigned int discardedPackets; |
michael@0 | 155 | *jitterMs = 0; |
michael@0 | 156 | *cumulativeLost = 0; |
michael@0 | 157 | return !mPtrRTP->GetRTPStatistics(mChannel, *jitterMs, maxJitterMs, |
michael@0 | 158 | discardedPackets, *cumulativeLost); |
michael@0 | 159 | } |
michael@0 | 160 | |
michael@0 | 161 | DOMHighResTimeStamp |
michael@0 | 162 | NTPtoDOMHighResTimeStamp(uint32_t ntpHigh, uint32_t ntpLow) { |
michael@0 | 163 | return (uint32_t(ntpHigh - webrtc::kNtpJan1970) + |
michael@0 | 164 | double(ntpLow) / webrtc::kMagicNtpFractionalUnit) * 1000; |
michael@0 | 165 | } |
michael@0 | 166 | |
michael@0 | 167 | bool WebrtcAudioConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp, |
michael@0 | 168 | uint32_t* jitterMs, |
michael@0 | 169 | uint32_t* packetsReceived, |
michael@0 | 170 | uint64_t* bytesReceived, |
michael@0 | 171 | uint32_t* cumulativeLost, |
michael@0 | 172 | int32_t* rttMs) { |
michael@0 | 173 | uint32_t ntpHigh, ntpLow; |
michael@0 | 174 | uint16_t fractionLost; |
michael@0 | 175 | bool result = !mPtrRTP->GetRemoteRTCPReceiverInfo(mChannel, ntpHigh, ntpLow, |
michael@0 | 176 | *packetsReceived, |
michael@0 | 177 | *bytesReceived, |
michael@0 | 178 | *jitterMs, |
michael@0 | 179 | fractionLost, |
michael@0 | 180 | *cumulativeLost, |
michael@0 | 181 | *rttMs); |
michael@0 | 182 | if (result) { |
michael@0 | 183 | *timestamp = NTPtoDOMHighResTimeStamp(ntpHigh, ntpLow); |
michael@0 | 184 | } |
michael@0 | 185 | return result; |
michael@0 | 186 | } |
michael@0 | 187 | |
michael@0 | 188 | bool WebrtcAudioConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp, |
michael@0 | 189 | unsigned int* packetsSent, |
michael@0 | 190 | uint64_t* bytesSent) { |
michael@0 | 191 | struct webrtc::SenderInfo senderInfo; |
michael@0 | 192 | bool result = !mPtrRTP->GetRemoteRTCPSenderInfo(mChannel, &senderInfo); |
michael@0 | 193 | if (result) { |
michael@0 | 194 | *timestamp = NTPtoDOMHighResTimeStamp(senderInfo.NTP_timestamp_high, |
michael@0 | 195 | senderInfo.NTP_timestamp_low); |
michael@0 | 196 | *packetsSent = senderInfo.sender_packet_count; |
michael@0 | 197 | *bytesSent = senderInfo.sender_octet_count; |
michael@0 | 198 | } |
michael@0 | 199 | return result; |
michael@0 | 200 | } |
michael@0 | 201 | |
michael@0 | 202 | /* |
michael@0 | 203 | * WebRTCAudioConduit Implementation |
michael@0 | 204 | */ |
michael@0 | 205 | MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other) |
michael@0 | 206 | { |
michael@0 | 207 | CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other); |
michael@0 | 208 | |
michael@0 | 209 | if (other) { |
michael@0 | 210 | MOZ_ASSERT(!other->mOtherDirection); |
michael@0 | 211 | other->mOtherDirection = this; |
michael@0 | 212 | mOtherDirection = other; |
michael@0 | 213 | |
michael@0 | 214 | // only one can call ::Create()/GetVoiceEngine() |
michael@0 | 215 | MOZ_ASSERT(other->mVoiceEngine); |
michael@0 | 216 | mVoiceEngine = other->mVoiceEngine; |
michael@0 | 217 | } else { |
michael@0 | 218 | #ifdef MOZ_WIDGET_ANDROID |
michael@0 | 219 | jobject context = jsjni_GetGlobalContextRef(); |
michael@0 | 220 | |
michael@0 | 221 | // get the JVM |
michael@0 | 222 | JavaVM *jvm = jsjni_GetVM(); |
michael@0 | 223 | JNIEnv* jenv = jsjni_GetJNIForThread(); |
michael@0 | 224 | |
michael@0 | 225 | if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) { |
michael@0 | 226 | CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__); |
michael@0 | 227 | return kMediaConduitSessionNotInited; |
michael@0 | 228 | } |
michael@0 | 229 | #endif |
michael@0 | 230 | |
michael@0 | 231 | // Per WebRTC APIs below function calls return nullptr on failure |
michael@0 | 232 | if(!(mVoiceEngine = webrtc::VoiceEngine::Create())) |
michael@0 | 233 | { |
michael@0 | 234 | CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__); |
michael@0 | 235 | return kMediaConduitSessionNotInited; |
michael@0 | 236 | } |
michael@0 | 237 | |
michael@0 | 238 | PRLogModuleInfo *logs = GetWebRTCLogInfo(); |
michael@0 | 239 | if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) { |
michael@0 | 240 | // no need to a critical section or lock here |
michael@0 | 241 | gWebrtcTraceLoggingOn = 1; |
michael@0 | 242 | |
michael@0 | 243 | const char *file = PR_GetEnv("WEBRTC_TRACE_FILE"); |
michael@0 | 244 | if (!file) { |
michael@0 | 245 | file = "WebRTC.log"; |
michael@0 | 246 | } |
michael@0 | 247 | CSFLogDebug(logTag, "%s Logging webrtc to %s level %d", __FUNCTION__, |
michael@0 | 248 | file, logs->level); |
michael@0 | 249 | mVoiceEngine->SetTraceFilter(logs->level); |
michael@0 | 250 | mVoiceEngine->SetTraceFile(file); |
michael@0 | 251 | } |
michael@0 | 252 | } |
michael@0 | 253 | |
michael@0 | 254 | if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine))) |
michael@0 | 255 | { |
michael@0 | 256 | CSFLogError(logTag, "%s Unable to initialize VoEBase", __FUNCTION__); |
michael@0 | 257 | return kMediaConduitSessionNotInited; |
michael@0 | 258 | } |
michael@0 | 259 | |
michael@0 | 260 | if(!(mPtrVoENetwork = VoENetwork::GetInterface(mVoiceEngine))) |
michael@0 | 261 | { |
michael@0 | 262 | CSFLogError(logTag, "%s Unable to initialize VoENetwork", __FUNCTION__); |
michael@0 | 263 | return kMediaConduitSessionNotInited; |
michael@0 | 264 | } |
michael@0 | 265 | |
michael@0 | 266 | if(!(mPtrVoECodec = VoECodec::GetInterface(mVoiceEngine))) |
michael@0 | 267 | { |
michael@0 | 268 | CSFLogError(logTag, "%s Unable to initialize VoEBCodec", __FUNCTION__); |
michael@0 | 269 | return kMediaConduitSessionNotInited; |
michael@0 | 270 | } |
michael@0 | 271 | |
michael@0 | 272 | if(!(mPtrVoEProcessing = VoEAudioProcessing::GetInterface(mVoiceEngine))) |
michael@0 | 273 | { |
michael@0 | 274 | CSFLogError(logTag, "%s Unable to initialize VoEProcessing", __FUNCTION__); |
michael@0 | 275 | return kMediaConduitSessionNotInited; |
michael@0 | 276 | } |
michael@0 | 277 | if(!(mPtrVoEXmedia = VoEExternalMedia::GetInterface(mVoiceEngine))) |
michael@0 | 278 | { |
michael@0 | 279 | CSFLogError(logTag, "%s Unable to initialize VoEExternalMedia", __FUNCTION__); |
michael@0 | 280 | return kMediaConduitSessionNotInited; |
michael@0 | 281 | } |
michael@0 | 282 | if(!(mPtrVoERTP_RTCP = VoERTP_RTCP::GetInterface(mVoiceEngine))) |
michael@0 | 283 | { |
michael@0 | 284 | CSFLogError(logTag, "%s Unable to initialize VoERTP_RTCP", __FUNCTION__); |
michael@0 | 285 | return kMediaConduitSessionNotInited; |
michael@0 | 286 | } |
michael@0 | 287 | |
michael@0 | 288 | if(!(mPtrVoEVideoSync = VoEVideoSync::GetInterface(mVoiceEngine))) |
michael@0 | 289 | { |
michael@0 | 290 | CSFLogError(logTag, "%s Unable to initialize VoEVideoSync", __FUNCTION__); |
michael@0 | 291 | return kMediaConduitSessionNotInited; |
michael@0 | 292 | } |
michael@0 | 293 | if (!(mPtrRTP = webrtc::VoERTP_RTCP::GetInterface(mVoiceEngine))) |
michael@0 | 294 | { |
michael@0 | 295 | CSFLogError(logTag, "%s Unable to get audio RTP/RTCP interface ", |
michael@0 | 296 | __FUNCTION__); |
michael@0 | 297 | return kMediaConduitSessionNotInited; |
michael@0 | 298 | } |
michael@0 | 299 | |
michael@0 | 300 | if (other) { |
michael@0 | 301 | mChannel = other->mChannel; |
michael@0 | 302 | } else { |
michael@0 | 303 | // init the engine with our audio device layer |
michael@0 | 304 | if(mPtrVoEBase->Init() == -1) |
michael@0 | 305 | { |
michael@0 | 306 | CSFLogError(logTag, "%s VoiceEngine Base Not Initialized", __FUNCTION__); |
michael@0 | 307 | return kMediaConduitSessionNotInited; |
michael@0 | 308 | } |
michael@0 | 309 | |
michael@0 | 310 | if( (mChannel = mPtrVoEBase->CreateChannel()) == -1) |
michael@0 | 311 | { |
michael@0 | 312 | CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__); |
michael@0 | 313 | return kMediaConduitChannelError; |
michael@0 | 314 | } |
michael@0 | 315 | |
michael@0 | 316 | CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel); |
michael@0 | 317 | |
michael@0 | 318 | if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1) |
michael@0 | 319 | { |
michael@0 | 320 | CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__); |
michael@0 | 321 | return kMediaConduitTransportRegistrationFail; |
michael@0 | 322 | } |
michael@0 | 323 | |
michael@0 | 324 | if(mPtrVoEXmedia->SetExternalRecordingStatus(true) == -1) |
michael@0 | 325 | { |
michael@0 | 326 | CSFLogError(logTag, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__, |
michael@0 | 327 | mPtrVoEBase->LastError()); |
michael@0 | 328 | return kMediaConduitExternalPlayoutError; |
michael@0 | 329 | } |
michael@0 | 330 | |
michael@0 | 331 | if(mPtrVoEXmedia->SetExternalPlayoutStatus(true) == -1) |
michael@0 | 332 | { |
michael@0 | 333 | CSFLogError(logTag, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__, |
michael@0 | 334 | mPtrVoEBase->LastError()); |
michael@0 | 335 | return kMediaConduitExternalRecordingError; |
michael@0 | 336 | } |
michael@0 | 337 | CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this); |
michael@0 | 338 | } |
michael@0 | 339 | return kMediaConduitNoError; |
michael@0 | 340 | } |
michael@0 | 341 | |
michael@0 | 342 | // AudioSessionConduit Implementation |
michael@0 | 343 | MediaConduitErrorCode |
michael@0 | 344 | WebrtcAudioConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport) |
michael@0 | 345 | { |
michael@0 | 346 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 347 | |
michael@0 | 348 | if(!aTransport) |
michael@0 | 349 | { |
michael@0 | 350 | CSFLogError(logTag, "%s NULL Transport", __FUNCTION__); |
michael@0 | 351 | return kMediaConduitInvalidTransport; |
michael@0 | 352 | } |
michael@0 | 353 | // set the transport |
michael@0 | 354 | mTransport = aTransport; |
michael@0 | 355 | return kMediaConduitNoError; |
michael@0 | 356 | } |
michael@0 | 357 | |
/**
 * Configure (or reconfigure) the codec used for outgoing audio.
 *
 * If the engine is already transmitting, sending is stopped first, the new
 * codec is applied, and sending is restarted.  On success a deep copy of
 * the applied config is kept in mCurSendCodecConfig.
 *
 * @param codecConfig  candidate send-codec parameters (validated below).
 * @return kMediaConduitNoError on success, otherwise a specific error code.
 */
MediaConduitErrorCode
WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
{
  CSFLogDebug(logTag, "%s ", __FUNCTION__);
  MediaConduitErrorCode condError = kMediaConduitNoError;
  int error = 0;//webrtc engine errors
  webrtc::CodecInst cinst;

  //validate codec param
  if((condError = ValidateCodecConfig(codecConfig, true)) != kMediaConduitNoError)
  {
    return condError;
  }

  //are we transmitting already, stop and apply the send codec
  if(mEngineTransmitting)
  {
    CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
    if(mPtrVoEBase->StopSend(mChannel) == -1)
    {
      CSFLogError(logTag, "%s StopSend() Failed %d ", __FUNCTION__,
                  mPtrVoEBase->LastError());
      return kMediaConduitUnknownError;
    }
  }

  mEngineTransmitting = false;

  if(!CodecConfigToWebRTCCodec(codecConfig,cinst))
  {
    CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
    return kMediaConduitMalformedArgument;
  }

  if(mPtrVoECodec->SetSendCodec(mChannel, cinst) == -1)
  {
    error = mPtrVoEBase->LastError();
    CSFLogError(logTag, "%s SetSendCodec - Invalid Codec %d ",__FUNCTION__,
                error);

    // Distinguish "codec rejected" from other engine failures.
    if(error == VE_CANNOT_SET_SEND_CODEC || error == VE_CODEC_ERROR)
    {
      CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__);
      return kMediaConduitInvalidSendCodec;
    }
    CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__,
                mPtrVoEBase->LastError());
    return kMediaConduitUnknownError;
  }

#ifdef MOZILLA_INTERNAL_API
  // TEMPORARY - see bug 694814 comment 2
  // Reads the capture-delay override pref into mCaptureDelay, which is
  // later consumed by SendAudioFrame().
  nsresult rv;
  nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
  if (NS_SUCCEEDED(rv)) {
    nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);

    if (branch) {
      branch->GetIntPref("media.peerconnection.capture_delay", &mCaptureDelay);
    }
  }
#endif

  //Let's Send Transport State-machine on the Engine
  if(mPtrVoEBase->StartSend(mChannel) == -1)
  {
    error = mPtrVoEBase->LastError();
    CSFLogError(logTag, "%s StartSend failed %d", __FUNCTION__, error);
    return kMediaConduitUnknownError;
  }

  //Copy the applied config for future reference.
  delete mCurSendCodecConfig;

  mCurSendCodecConfig = new AudioCodecConfig(codecConfig->mType,
                                             codecConfig->mName,
                                             codecConfig->mFreq,
                                             codecConfig->mPacSize,
                                             codecConfig->mChannels,
                                             codecConfig->mRate,
                                             codecConfig->mLoadManager);

  mEngineTransmitting = true;
  return kMediaConduitNoError;
}
michael@0 | 443 | |
/**
 * Configure the set of codecs accepted on the receive side.
 *
 * Any current reception/playout is stopped first, since a new receive codec
 * cannot be applied while the engine is playing.  Each config in the list
 * is validated and registered with the engine; the call succeeds if at
 * least one codec was applied, after which reception and playout are
 * (re)started.
 *
 * @param codecConfigList  non-empty list of candidate receive codecs.
 * @return kMediaConduitNoError on success, otherwise a specific error code.
 */
MediaConduitErrorCode
WebrtcAudioConduit::ConfigureRecvMediaCodecs(
                    const std::vector<AudioCodecConfig*>& codecConfigList)
{
  CSFLogDebug(logTag, "%s ", __FUNCTION__);
  MediaConduitErrorCode condError = kMediaConduitNoError;
  int error = 0; //webrtc engine errors
  bool success = false;

  // Are we receiving already? If so, stop receiving and playout
  // since we can't apply new recv codec when the engine is playing.
  if(mEngineReceiving)
  {
    CSFLogDebug(logTag, "%s Engine Already Receiving. Attemping to Stop ", __FUNCTION__);
    // AudioEngine doesn't fail fatally on stopping reception. Ref:voe_errors.h.
    // hence we need not be strict in failing here on errors
    mPtrVoEBase->StopReceive(mChannel);
    CSFLogDebug(logTag, "%s Attemping to Stop playout ", __FUNCTION__);
    if(mPtrVoEBase->StopPlayout(mChannel) == -1)
    {
      if( mPtrVoEBase->LastError() == VE_CANNOT_STOP_PLAYOUT)
      {
        CSFLogDebug(logTag, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
        return kMediaConduitPlayoutError;
      }
    }
  }

  mEngineReceiving = false;

  if(codecConfigList.empty())
  {
    CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
    return kMediaConduitMalformedArgument;
  }

  // Try Applying the codecs in the list.
  // We succeed if at least one codec was applied and reception was
  // started successfully.
  for(std::vector<AudioCodecConfig*>::size_type i=0 ;i<codecConfigList.size();i++)
  {
    //if the codec param is invalid or duplicate, return error
    if((condError = ValidateCodecConfig(codecConfigList[i],false)) != kMediaConduitNoError)
    {
      return condError;
    }

    webrtc::CodecInst cinst;
    if(!CodecConfigToWebRTCCodec(codecConfigList[i],cinst))
    {
      // conversion failure is not fatal: try the next codec in the list
      CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
      continue;
    }

    if(mPtrVoECodec->SetRecPayloadType(mChannel,cinst) == -1)
    {
      error = mPtrVoEBase->LastError();
      CSFLogError(logTag, "%s SetRecvCodec Failed %d ",__FUNCTION__, error);
      continue;
    } else {
      CSFLogDebug(logTag, "%s Successfully Set RecvCodec %s", __FUNCTION__,
                  codecConfigList[i]->mName.c_str());
      //copy this to local database
      if(CopyCodecToDB(codecConfigList[i]))
      {
        success = true;
      } else {
        CSFLogError(logTag,"%s Unable to updated Codec Database", __FUNCTION__);
        return kMediaConduitUnknownError;
      }

    }

  } //end for

  if(!success)
  {
    CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__);
    return kMediaConduitInvalidReceiveCodec;
  }

  //If we are here, atleast one codec should have been set
  if(mPtrVoEBase->StartReceive(mChannel) == -1)
  {
    error = mPtrVoEBase->LastError();
    CSFLogError(logTag , "%s StartReceive Failed %d ",__FUNCTION__, error);
    if(error == VE_RECV_SOCKET_ERROR)
    {
      return kMediaConduitSocketError;
    }
    return kMediaConduitUnknownError;
  }


  if(mPtrVoEBase->StartPlayout(mChannel) == -1)
  {
    CSFLogError(logTag, "%s Starting playout Failed", __FUNCTION__);
    return kMediaConduitPlayoutError;
  }
  //we should be good here for setting this.
  mEngineReceiving = true;
  DumpCodecDB();
  return kMediaConduitNoError;
}
michael@0 | 548 | MediaConduitErrorCode |
michael@0 | 549 | WebrtcAudioConduit::EnableAudioLevelExtension(bool enabled, uint8_t id) |
michael@0 | 550 | { |
michael@0 | 551 | CSFLogDebug(logTag, "%s %d %d ", __FUNCTION__, enabled, id); |
michael@0 | 552 | |
michael@0 | 553 | if (mPtrVoERTP_RTCP->SetRTPAudioLevelIndicationStatus(mChannel, enabled, id) == -1) |
michael@0 | 554 | { |
michael@0 | 555 | CSFLogError(logTag, "%s SetRTPAudioLevelIndicationStatus Failed", __FUNCTION__); |
michael@0 | 556 | return kMediaConduitUnknownError; |
michael@0 | 557 | } |
michael@0 | 558 | |
michael@0 | 559 | return kMediaConduitNoError; |
michael@0 | 560 | } |
michael@0 | 561 | |
/**
 * Feed one 10ms-multiple block of captured PCM into the engine for
 * encoding and transmission.
 *
 * @param audio_data      non-null buffer of 16-bit samples.
 * @param lengthSamples   sample count; must be a whole number of 10 ms
 *                        frames at samplingFreqHz.
 * @param samplingFreqHz  must pass IsSamplingFreqSupported().
 * @param capture_delay   non-negative capture delay in ms (but see the
 *                        override note below).
 * @return kMediaConduitNoError on success, otherwise a specific error code.
 */
MediaConduitErrorCode
WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
                                   int32_t lengthSamples,
                                   int32_t samplingFreqHz,
                                   int32_t capture_delay)
{
  CSFLogDebug(logTag, "%s ", __FUNCTION__);
  // Following checks need to be performed
  // 1. Non null audio buffer pointer,
  // 2. invalid sampling frequency - less than 0 or unsupported ones
  // 3. Appropriate Sample Length for 10 ms audio-frame. This represents
  // block size the VoiceEngine feeds into encoder for passed in audio-frame
  // Ex: for 16000 sampling rate , valid block-length is 160
  // Similarly for 32000 sampling rate, valid block length is 320
  // We do the check by the verify modular operator below to be zero

  if(!audio_data || (lengthSamples <= 0) ||
                    (IsSamplingFreqSupported(samplingFreqHz) == false) ||
                    ((lengthSamples % (samplingFreqHz / 100) != 0)) )
  {
    CSFLogError(logTag, "%s Invalid Parameters ",__FUNCTION__);
    MOZ_ASSERT(PR_FALSE);
    return kMediaConduitMalformedArgument;
  }

  //validate capture time
  if(capture_delay < 0 )
  {
    CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__);
    MOZ_ASSERT(PR_FALSE);
    return kMediaConduitMalformedArgument;
  }

  // if transmission is not started .. conduit cannot insert frames
  if(!mEngineTransmitting)
  {
    CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
    return kMediaConduitSessionNotInited;
  }

#ifdef MOZILLA_INTERNAL_API
  // Record an insertion timestamp for latency logging.
  if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
    struct Processing insert = { TimeStamp::Now(), 0 };
    mProcessing.AppendElement(insert);
  }
#endif

  // NOTE(review): the caller-supplied capture_delay is unconditionally
  // replaced by the pref-derived mCaptureDelay (read in
  // ConfigureSendMediaCodec) — confirm this override is intended.
  capture_delay = mCaptureDelay;
  //Insert the samples
  if(mPtrVoEXmedia->ExternalRecordingInsertData(audio_data,
                                                lengthSamples,
                                                samplingFreqHz,
                                                capture_delay) == -1)
  {
    int error = mPtrVoEBase->LastError();
    CSFLogError(logTag, "%s Inserting audio data Failed %d", __FUNCTION__, error);
    if(error == VE_RUNTIME_REC_ERROR)
    {
      return kMediaConduitRecordingError;
    }
    return kMediaConduitUnknownError;
  }
  // we should be good here
  return kMediaConduitNoError;
}
michael@0 | 627 | |
michael@0 | 628 | MediaConduitErrorCode |
michael@0 | 629 | WebrtcAudioConduit::GetAudioFrame(int16_t speechData[], |
michael@0 | 630 | int32_t samplingFreqHz, |
michael@0 | 631 | int32_t capture_delay, |
michael@0 | 632 | int& lengthSamples) |
michael@0 | 633 | { |
michael@0 | 634 | |
michael@0 | 635 | CSFLogDebug(logTag, "%s ", __FUNCTION__); |
michael@0 | 636 | unsigned int numSamples = 0; |
michael@0 | 637 | |
michael@0 | 638 | //validate params |
michael@0 | 639 | if(!speechData ) |
michael@0 | 640 | { |
michael@0 | 641 | CSFLogError(logTag,"%s Null Audio Buffer Pointer", __FUNCTION__); |
michael@0 | 642 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 643 | return kMediaConduitMalformedArgument; |
michael@0 | 644 | } |
michael@0 | 645 | |
michael@0 | 646 | // Validate sample length |
michael@0 | 647 | if((numSamples = GetNum10msSamplesForFrequency(samplingFreqHz)) == 0 ) |
michael@0 | 648 | { |
michael@0 | 649 | CSFLogError(logTag,"%s Invalid Sampling Frequency ", __FUNCTION__); |
michael@0 | 650 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 651 | return kMediaConduitMalformedArgument; |
michael@0 | 652 | } |
michael@0 | 653 | |
michael@0 | 654 | //validate capture time |
michael@0 | 655 | if(capture_delay < 0 ) |
michael@0 | 656 | { |
michael@0 | 657 | CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__); |
michael@0 | 658 | MOZ_ASSERT(PR_FALSE); |
michael@0 | 659 | return kMediaConduitMalformedArgument; |
michael@0 | 660 | } |
michael@0 | 661 | |
michael@0 | 662 | //Conduit should have reception enabled before we ask for decoded |
michael@0 | 663 | // samples |
michael@0 | 664 | if(!mEngineReceiving) |
michael@0 | 665 | { |
michael@0 | 666 | CSFLogError(logTag, "%s Engine not Receiving ", __FUNCTION__); |
michael@0 | 667 | return kMediaConduitSessionNotInited; |
michael@0 | 668 | } |
michael@0 | 669 | |
michael@0 | 670 | |
michael@0 | 671 | lengthSamples = 0; //output paramter |
michael@0 | 672 | |
michael@0 | 673 | if(mPtrVoEXmedia->ExternalPlayoutGetData( speechData, |
michael@0 | 674 | samplingFreqHz, |
michael@0 | 675 | capture_delay, |
michael@0 | 676 | lengthSamples) == -1) |
michael@0 | 677 | { |
michael@0 | 678 | int error = mPtrVoEBase->LastError(); |
michael@0 | 679 | CSFLogError(logTag, "%s Getting audio data Failed %d", __FUNCTION__, error); |
michael@0 | 680 | if(error == VE_RUNTIME_PLAY_ERROR) |
michael@0 | 681 | { |
michael@0 | 682 | return kMediaConduitPlayoutError; |
michael@0 | 683 | } |
michael@0 | 684 | return kMediaConduitUnknownError; |
michael@0 | 685 | } |
michael@0 | 686 | |
michael@0 | 687 | // Not #ifdef DEBUG or on a log module so we can use it for about:webrtc/etc |
michael@0 | 688 | mSamples += lengthSamples; |
michael@0 | 689 | if (mSamples >= mLastSyncLog + samplingFreqHz) { |
michael@0 | 690 | int jitter_buffer_delay_ms; |
michael@0 | 691 | int playout_buffer_delay_ms; |
michael@0 | 692 | int avsync_offset_ms; |
michael@0 | 693 | if (GetAVStats(&jitter_buffer_delay_ms, |
michael@0 | 694 | &playout_buffer_delay_ms, |
michael@0 | 695 | &avsync_offset_ms)) { |
michael@0 | 696 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 697 | if (avsync_offset_ms < 0) { |
michael@0 | 698 | Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_VIDEO_LAGS_AUDIO_MS, |
michael@0 | 699 | -avsync_offset_ms); |
michael@0 | 700 | } else { |
michael@0 | 701 | Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_AUDIO_LAGS_VIDEO_MS, |
michael@0 | 702 | avsync_offset_ms); |
michael@0 | 703 | } |
michael@0 | 704 | #endif |
michael@0 | 705 | CSFLogError(logTag, |
michael@0 | 706 | "A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms", |
michael@0 | 707 | avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms); |
michael@0 | 708 | } else { |
michael@0 | 709 | CSFLogError(logTag, "A/V sync: GetAVStats failed"); |
michael@0 | 710 | } |
michael@0 | 711 | mLastSyncLog = mSamples; |
michael@0 | 712 | } |
michael@0 | 713 | |
michael@0 | 714 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 715 | if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) { |
michael@0 | 716 | if (mProcessing.Length() > 0) { |
michael@0 | 717 | unsigned int now; |
michael@0 | 718 | mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now); |
michael@0 | 719 | if (static_cast<uint32_t>(now) != mLastTimestamp) { |
michael@0 | 720 | mLastTimestamp = static_cast<uint32_t>(now); |
michael@0 | 721 | // Find the block that includes this timestamp in the network input |
michael@0 | 722 | while (mProcessing.Length() > 0) { |
michael@0 | 723 | // FIX! assumes 20ms @ 48000Hz |
michael@0 | 724 | // FIX handle wrap-around |
michael@0 | 725 | if (mProcessing[0].mRTPTimeStamp + 20*(48000/1000) >= now) { |
michael@0 | 726 | TimeDuration t = TimeStamp::Now() - mProcessing[0].mTimeStamp; |
michael@0 | 727 | // Wrap-around? |
michael@0 | 728 | int64_t delta = t.ToMilliseconds() + (now - mProcessing[0].mRTPTimeStamp)/(48000/1000); |
michael@0 | 729 | LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta); |
michael@0 | 730 | break; |
michael@0 | 731 | } |
michael@0 | 732 | mProcessing.RemoveElementAt(0); |
michael@0 | 733 | } |
michael@0 | 734 | } |
michael@0 | 735 | } |
michael@0 | 736 | } |
michael@0 | 737 | #endif |
michael@0 | 738 | CSFLogDebug(logTag,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__, |
michael@0 | 739 | lengthSamples); |
michael@0 | 740 | return kMediaConduitNoError; |
michael@0 | 741 | } |
michael@0 | 742 | |
michael@0 | 743 | // Transport Layer Callbacks |
michael@0 | 744 | MediaConduitErrorCode |
michael@0 | 745 | WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len) |
michael@0 | 746 | { |
michael@0 | 747 | CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, mChannel); |
michael@0 | 748 | |
michael@0 | 749 | if(mEngineReceiving) |
michael@0 | 750 | { |
michael@0 | 751 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 752 | if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) { |
michael@0 | 753 | // timestamp is at 32 bits in ([1]) |
michael@0 | 754 | struct Processing insert = { TimeStamp::Now(), |
michael@0 | 755 | ntohl(static_cast<const uint32_t *>(data)[1]) }; |
michael@0 | 756 | mProcessing.AppendElement(insert); |
michael@0 | 757 | } |
michael@0 | 758 | #endif |
michael@0 | 759 | |
michael@0 | 760 | if(mPtrVoENetwork->ReceivedRTPPacket(mChannel,data,len) == -1) |
michael@0 | 761 | { |
michael@0 | 762 | int error = mPtrVoEBase->LastError(); |
michael@0 | 763 | CSFLogError(logTag, "%s RTP Processing Error %d", __FUNCTION__, error); |
michael@0 | 764 | if(error == VE_RTP_RTCP_MODULE_ERROR) |
michael@0 | 765 | { |
michael@0 | 766 | return kMediaConduitRTPRTCPModuleError; |
michael@0 | 767 | } |
michael@0 | 768 | return kMediaConduitUnknownError; |
michael@0 | 769 | } |
michael@0 | 770 | } else { |
michael@0 | 771 | CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__); |
michael@0 | 772 | return kMediaConduitSessionNotInited; |
michael@0 | 773 | } |
michael@0 | 774 | |
michael@0 | 775 | return kMediaConduitNoError; |
michael@0 | 776 | } |
michael@0 | 777 | |
michael@0 | 778 | MediaConduitErrorCode |
michael@0 | 779 | WebrtcAudioConduit::ReceivedRTCPPacket(const void *data, int len) |
michael@0 | 780 | { |
michael@0 | 781 | CSFLogDebug(logTag, "%s : channel %d",__FUNCTION__, mChannel); |
michael@0 | 782 | |
michael@0 | 783 | if(mEngineTransmitting) |
michael@0 | 784 | { |
michael@0 | 785 | if(mPtrVoENetwork->ReceivedRTCPPacket(mChannel, data, len) == -1) |
michael@0 | 786 | { |
michael@0 | 787 | int error = mPtrVoEBase->LastError(); |
michael@0 | 788 | CSFLogError(logTag, "%s RTCP Processing Error %d", __FUNCTION__, error); |
michael@0 | 789 | if(error == VE_RTP_RTCP_MODULE_ERROR) |
michael@0 | 790 | { |
michael@0 | 791 | return kMediaConduitRTPRTCPModuleError; |
michael@0 | 792 | } |
michael@0 | 793 | return kMediaConduitUnknownError; |
michael@0 | 794 | } |
michael@0 | 795 | } else { |
michael@0 | 796 | CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__); |
michael@0 | 797 | return kMediaConduitSessionNotInited; |
michael@0 | 798 | } |
michael@0 | 799 | return kMediaConduitNoError; |
michael@0 | 800 | } |
michael@0 | 801 | |
michael@0 | 802 | //WebRTC::RTP Callback Implementation |
michael@0 | 803 | int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len) |
michael@0 | 804 | { |
michael@0 | 805 | CSFLogDebug(logTag, "%s : channel %d %s", __FUNCTION__, channel, |
michael@0 | 806 | (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : ""); |
michael@0 | 807 | |
michael@0 | 808 | if (mEngineReceiving) |
michael@0 | 809 | { |
michael@0 | 810 | if (mOtherDirection) |
michael@0 | 811 | { |
michael@0 | 812 | return mOtherDirection->SendPacket(channel, data, len); |
michael@0 | 813 | } |
michael@0 | 814 | CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d", |
michael@0 | 815 | __FUNCTION__, channel); |
michael@0 | 816 | return -1; |
michael@0 | 817 | } else { |
michael@0 | 818 | #ifdef MOZILLA_INTERNAL_API |
michael@0 | 819 | if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) { |
michael@0 | 820 | if (mProcessing.Length() > 0) { |
michael@0 | 821 | TimeStamp started = mProcessing[0].mTimeStamp; |
michael@0 | 822 | mProcessing.RemoveElementAt(0); |
michael@0 | 823 | mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes |
michael@0 | 824 | TimeDuration t = TimeStamp::Now() - started; |
michael@0 | 825 | int64_t delta = t.ToMilliseconds(); |
michael@0 | 826 | LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta); |
michael@0 | 827 | } |
michael@0 | 828 | } |
michael@0 | 829 | #endif |
michael@0 | 830 | if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK)) |
michael@0 | 831 | { |
michael@0 | 832 | CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__); |
michael@0 | 833 | return len; |
michael@0 | 834 | } else { |
michael@0 | 835 | CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__); |
michael@0 | 836 | return -1; |
michael@0 | 837 | } |
michael@0 | 838 | } |
michael@0 | 839 | } |
michael@0 | 840 | |
michael@0 | 841 | int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len) |
michael@0 | 842 | { |
michael@0 | 843 | CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, channel); |
michael@0 | 844 | |
michael@0 | 845 | if (mEngineTransmitting) |
michael@0 | 846 | { |
michael@0 | 847 | if (mOtherDirection) |
michael@0 | 848 | { |
michael@0 | 849 | return mOtherDirection->SendRTCPPacket(channel, data, len); |
michael@0 | 850 | } |
michael@0 | 851 | } |
michael@0 | 852 | |
michael@0 | 853 | // We come here if we have only one pipeline/conduit setup, |
michael@0 | 854 | // such as for unidirectional streams. |
michael@0 | 855 | // We also end up here if we are receiving |
michael@0 | 856 | if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK) |
michael@0 | 857 | { |
michael@0 | 858 | CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__); |
michael@0 | 859 | return len; |
michael@0 | 860 | } else { |
michael@0 | 861 | CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__); |
michael@0 | 862 | return -1; |
michael@0 | 863 | } |
michael@0 | 864 | } |
michael@0 | 865 | |
michael@0 | 866 | /** |
michael@0 | 867 | * Converts between CodecConfig to WebRTC Codec Structure. |
michael@0 | 868 | */ |
michael@0 | 869 | |
michael@0 | 870 | bool |
michael@0 | 871 | WebrtcAudioConduit::CodecConfigToWebRTCCodec(const AudioCodecConfig* codecInfo, |
michael@0 | 872 | webrtc::CodecInst& cinst) |
michael@0 | 873 | { |
michael@0 | 874 | const unsigned int plNameLength = codecInfo->mName.length()+1; |
michael@0 | 875 | memset(&cinst, 0, sizeof(webrtc::CodecInst)); |
michael@0 | 876 | if(sizeof(cinst.plname) < plNameLength) |
michael@0 | 877 | { |
michael@0 | 878 | CSFLogError(logTag, "%s Payload name buffer capacity mismatch ", |
michael@0 | 879 | __FUNCTION__); |
michael@0 | 880 | return false; |
michael@0 | 881 | } |
michael@0 | 882 | memcpy(cinst.plname, codecInfo->mName.c_str(),codecInfo->mName.length()); |
michael@0 | 883 | cinst.plname[plNameLength]='\0'; |
michael@0 | 884 | cinst.pltype = codecInfo->mType; |
michael@0 | 885 | cinst.rate = codecInfo->mRate; |
michael@0 | 886 | cinst.pacsize = codecInfo->mPacSize; |
michael@0 | 887 | cinst.plfreq = codecInfo->mFreq; |
michael@0 | 888 | cinst.channels = codecInfo->mChannels; |
michael@0 | 889 | return true; |
michael@0 | 890 | } |
michael@0 | 891 | |
michael@0 | 892 | /** |
michael@0 | 893 | * Supported Sampling Frequncies. |
michael@0 | 894 | */ |
michael@0 | 895 | bool |
michael@0 | 896 | WebrtcAudioConduit::IsSamplingFreqSupported(int freq) const |
michael@0 | 897 | { |
michael@0 | 898 | if(GetNum10msSamplesForFrequency(freq)) |
michael@0 | 899 | { |
michael@0 | 900 | return true; |
michael@0 | 901 | } else { |
michael@0 | 902 | return false; |
michael@0 | 903 | } |
michael@0 | 904 | } |
michael@0 | 905 | |
michael@0 | 906 | /* Return block-length of 10 ms audio frame in number of samples */ |
michael@0 | 907 | unsigned int |
michael@0 | 908 | WebrtcAudioConduit::GetNum10msSamplesForFrequency(int samplingFreqHz) const |
michael@0 | 909 | { |
michael@0 | 910 | switch(samplingFreqHz) |
michael@0 | 911 | { |
michael@0 | 912 | case 16000: return 160; //160 samples |
michael@0 | 913 | case 32000: return 320; //320 samples |
michael@0 | 914 | case 44100: return 441; //441 samples |
michael@0 | 915 | case 48000: return 480; //480 samples |
michael@0 | 916 | default: return 0; // invalid or unsupported |
michael@0 | 917 | } |
michael@0 | 918 | } |
michael@0 | 919 | |
//Copy the codec passed into Conduit's database
// Deep-copies the caller's codec description into mRecvCodecList so the
// conduit keeps its own instance independent of the caller's lifetime.
// NOTE(review): mRecvCodecList stores owning raw pointers — presumably
// they are deleted when the list is torn down elsewhere in this file;
// verify to rule out a leak.
// Always returns true.
bool
WebrtcAudioConduit::CopyCodecToDB(const AudioCodecConfig* codecInfo)
{

  AudioCodecConfig* cdcConfig = new AudioCodecConfig(codecInfo->mType,
                                                     codecInfo->mName,
                                                     codecInfo->mFreq,
                                                     codecInfo->mPacSize,
                                                     codecInfo->mChannels,
                                                     codecInfo->mRate,
                                                     codecInfo->mLoadManager);
  mRecvCodecList.push_back(cdcConfig);
  return true;
}
michael@0 | 935 | |
michael@0 | 936 | /** |
michael@0 | 937 | * Checks if 2 codec structs are same |
michael@0 | 938 | */ |
michael@0 | 939 | bool |
michael@0 | 940 | WebrtcAudioConduit::CheckCodecsForMatch(const AudioCodecConfig* curCodecConfig, |
michael@0 | 941 | const AudioCodecConfig* codecInfo) const |
michael@0 | 942 | { |
michael@0 | 943 | if(!curCodecConfig) |
michael@0 | 944 | { |
michael@0 | 945 | return false; |
michael@0 | 946 | } |
michael@0 | 947 | |
michael@0 | 948 | if(curCodecConfig->mType == codecInfo->mType && |
michael@0 | 949 | (curCodecConfig->mName.compare(codecInfo->mName) == 0) && |
michael@0 | 950 | curCodecConfig->mFreq == codecInfo->mFreq && |
michael@0 | 951 | curCodecConfig->mPacSize == codecInfo->mPacSize && |
michael@0 | 952 | curCodecConfig->mChannels == codecInfo->mChannels && |
michael@0 | 953 | curCodecConfig->mRate == codecInfo->mRate) |
michael@0 | 954 | { |
michael@0 | 955 | return true; |
michael@0 | 956 | } |
michael@0 | 957 | |
michael@0 | 958 | return false; |
michael@0 | 959 | } |
michael@0 | 960 | |
michael@0 | 961 | /** |
michael@0 | 962 | * Checks if the codec is already in Conduit's database |
michael@0 | 963 | */ |
michael@0 | 964 | bool |
michael@0 | 965 | WebrtcAudioConduit::CheckCodecForMatch(const AudioCodecConfig* codecInfo) const |
michael@0 | 966 | { |
michael@0 | 967 | //the db should have atleast one codec |
michael@0 | 968 | for(std::vector<AudioCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++) |
michael@0 | 969 | { |
michael@0 | 970 | if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo)) |
michael@0 | 971 | { |
michael@0 | 972 | //match |
michael@0 | 973 | return true; |
michael@0 | 974 | } |
michael@0 | 975 | } |
michael@0 | 976 | //no match or empty local db |
michael@0 | 977 | return false; |
michael@0 | 978 | } |
michael@0 | 979 | |
michael@0 | 980 | |
michael@0 | 981 | /** |
michael@0 | 982 | * Perform validation on the codecConfig to be applied. |
michael@0 | 983 | * Verifies if the codec is already applied. |
michael@0 | 984 | */ |
michael@0 | 985 | MediaConduitErrorCode |
michael@0 | 986 | WebrtcAudioConduit::ValidateCodecConfig(const AudioCodecConfig* codecInfo, |
michael@0 | 987 | bool send) const |
michael@0 | 988 | { |
michael@0 | 989 | bool codecAppliedAlready = false; |
michael@0 | 990 | |
michael@0 | 991 | if(!codecInfo) |
michael@0 | 992 | { |
michael@0 | 993 | CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__); |
michael@0 | 994 | return kMediaConduitMalformedArgument; |
michael@0 | 995 | } |
michael@0 | 996 | |
michael@0 | 997 | if((codecInfo->mName.empty()) || |
michael@0 | 998 | (codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) |
michael@0 | 999 | { |
michael@0 | 1000 | CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__); |
michael@0 | 1001 | return kMediaConduitMalformedArgument; |
michael@0 | 1002 | } |
michael@0 | 1003 | |
michael@0 | 1004 | //Only mono or stereo channels supported |
michael@0 | 1005 | if( (codecInfo->mChannels != 1) && (codecInfo->mChannels != 2)) |
michael@0 | 1006 | { |
michael@0 | 1007 | CSFLogError(logTag, "%s Channel Unsupported ", __FUNCTION__); |
michael@0 | 1008 | return kMediaConduitMalformedArgument; |
michael@0 | 1009 | } |
michael@0 | 1010 | |
michael@0 | 1011 | //check if we have the same codec already applied |
michael@0 | 1012 | if(send) |
michael@0 | 1013 | { |
michael@0 | 1014 | codecAppliedAlready = CheckCodecsForMatch(mCurSendCodecConfig,codecInfo); |
michael@0 | 1015 | } else { |
michael@0 | 1016 | codecAppliedAlready = CheckCodecForMatch(codecInfo); |
michael@0 | 1017 | } |
michael@0 | 1018 | |
michael@0 | 1019 | if(codecAppliedAlready) |
michael@0 | 1020 | { |
michael@0 | 1021 | CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str()); |
michael@0 | 1022 | return kMediaConduitCodecInUse; |
michael@0 | 1023 | } |
michael@0 | 1024 | return kMediaConduitNoError; |
michael@0 | 1025 | } |
michael@0 | 1026 | |
michael@0 | 1027 | void |
michael@0 | 1028 | WebrtcAudioConduit::DumpCodecDB() const |
michael@0 | 1029 | { |
michael@0 | 1030 | for(std::vector<AudioCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++) |
michael@0 | 1031 | { |
michael@0 | 1032 | CSFLogDebug(logTag,"Payload Name: %s", mRecvCodecList[i]->mName.c_str()); |
michael@0 | 1033 | CSFLogDebug(logTag,"Payload Type: %d", mRecvCodecList[i]->mType); |
michael@0 | 1034 | CSFLogDebug(logTag,"Payload Frequency: %d", mRecvCodecList[i]->mFreq); |
michael@0 | 1035 | CSFLogDebug(logTag,"Payload PacketSize: %d", mRecvCodecList[i]->mPacSize); |
michael@0 | 1036 | CSFLogDebug(logTag,"Payload Channels: %d", mRecvCodecList[i]->mChannels); |
michael@0 | 1037 | CSFLogDebug(logTag,"Payload Sampling Rate: %d", mRecvCodecList[i]->mRate); |
michael@0 | 1038 | } |
michael@0 | 1039 | } |
michael@0 | 1040 | }// end namespace |