/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef AudioEventTimeline_h_
#define AudioEventTimeline_h_

#include <algorithm>
#include "mozilla/Assertions.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/TypedEnum.h"
#include "mozilla/PodOperations.h"

#include "nsTArray.h"
#include "math.h"

namespace mozilla {

namespace dom {

// This is an internal helper class and should not be used outside of this header.
struct AudioTimelineEvent {
  enum Type MOZ_ENUM_TYPE(uint32_t) {
    SetValue,
    LinearRamp,
    ExponentialRamp,
    SetTarget,
    SetValueCurve
  };

  AudioTimelineEvent(Type aType, double aTime, float aValue, double aTimeConstant = 0.0,
                     float aDuration = 0.0, const float* aCurve = nullptr, uint32_t aCurveLength = 0)
    : mType(aType)
    , mTimeConstant(aTimeConstant)
    , mDuration(aDuration)
#ifdef DEBUG
    , mTimeIsInTicks(false)
#endif
  {
    mTime = aTime;
    if (aType == AudioTimelineEvent::SetValueCurve) {
      SetCurveParams(aCurve, aCurveLength);
    } else {
      mValue = aValue;
    }
  }

  AudioTimelineEvent(const AudioTimelineEvent& rhs)
  {
    PodCopy(this, &rhs, 1);
    if (rhs.mType == AudioTimelineEvent::SetValueCurve) {
      SetCurveParams(rhs.mCurve, rhs.mCurveLength);
    }
  }

  ~AudioTimelineEvent()
  {
    if (mType == AudioTimelineEvent::SetValueCurve) {
      delete[] mCurve;
    }
  }

  bool IsValid() const
  {
    if (mType == AudioTimelineEvent::SetValueCurve) {
      if (!mCurve || !mCurveLength) {
        return false;
      }
      for (uint32_t i = 0; i < mCurveLength; ++i) {
        if (!IsValid(mCurve[i])) {
          return false;
        }
      }
    }

    return IsValid(mTime) &&
           IsValid(mValue) &&
           IsValid(mTimeConstant) &&
           IsValid(mDuration);
  }

  template <class TimeType>
  TimeType Time() const;

  void SetTimeInTicks(int64_t aTimeInTicks)
  {
    mTimeInTicks = aTimeInTicks;
#ifdef DEBUG
    mTimeIsInTicks = true;
#endif
  }

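  // Copy aCurveLength samples from aCurve into a buffer owned by this event;
  // the buffer is released in the destructor when the event is a SetValueCurve.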
  void SetCurveParams(const float* aCurve, uint32_t aCurveLength) {
    mCurveLength = aCurveLength;
    if (aCurveLength) {
      mCurve = new float[aCurveLength];
      PodCopy(mCurve, aCurve, aCurveLength);
    } else {
      mCurve = nullptr;
    }
  }

  Type mType;
  union {
    float mValue;
    uint32_t mCurveLength;
  };
  // The time for an event can either be in absolute value or in ticks.
  // Initially the time of the event is always in absolute value.
  // In order to convert it to ticks, call SetTimeInTicks. Once this
  // method has been called for an event, the time cannot be converted
  // back to absolute value.
  union {
    double mTime;
    int64_t mTimeInTicks;
  };
  // mCurve contains a buffer of SetValueCurve samples. We sample the
  // values in the buffer depending on how far along we are in time.
  // If we're at time T and the event has started at time T0 and has a
  // duration of D, we sample the buffer at floor(mCurveLength*(T-T0)/D)
  // if T<T0+D, and just take the last sample in the buffer otherwise.
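  // For example, with mCurveLength == 4, T0 == 0 and D == 1, a query at
  // T == 0.6 reads mCurve[floor(4 * 0.6)] == mCurve[2].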
  float* mCurve;
  double mTimeConstant;
  double mDuration;
#ifdef DEBUG
  bool mTimeIsInTicks;
#endif

private:
  static bool IsValid(double value)
  {
    return mozilla::IsFinite(value);
  }
};

template <>
inline double AudioTimelineEvent::Time<double>() const
{
  MOZ_ASSERT(!mTimeIsInTicks);
  return mTime;
}

template <>
inline int64_t AudioTimelineEvent::Time<int64_t>() const
{
  MOZ_ASSERT(mTimeIsInTicks);
  return mTimeInTicks;
}

/**
 * This class will be instantiated with different template arguments for testing and
 * production code.
 *
 * ErrorResult is a type which satisfies the following:
 *  - Implements a Throw() method taking an nsresult argument, representing an error code.
 */
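// For illustration only: a hypothetical test double that satisfies the
// ErrorResult requirement above, together with a sketch of typical use.
// The names below are not part of this header.
//
//   struct ErrorResultMock {
//     void Throw(nsresult) {}
//   };
//
//   AudioEventTimeline<ErrorResultMock> timeline(0.0f);
//   ErrorResultMock rv;
//   timeline.SetValueAtTime(1.0f, 0.0, rv);
//   timeline.LinearRampToValueAtTime(0.0f, 1.0, rv);
//   float v = timeline.GetValueAtTime(0.5);  // 0.5f, halfway along the ramp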
template <class ErrorResult>
class AudioEventTimeline
{
public:
  explicit AudioEventTimeline(float aDefaultValue)
    : mValue(aDefaultValue),
      mComputedValue(aDefaultValue),
      mLastComputedValue(aDefaultValue)
  {
  }

  bool HasSimpleValue() const
  {
    return mEvents.IsEmpty();
  }

  float GetValue() const
  {
    // This method should only be called if HasSimpleValue() returns true
    MOZ_ASSERT(HasSimpleValue());
    return mValue;
  }

  float Value() const
  {
    // TODO: Return the current value based on the timeline of the AudioContext
    return mValue;
  }

  void SetValue(float aValue)
  {
    // Silently don't change anything if there are any events
    if (mEvents.IsEmpty()) {
      mLastComputedValue = mComputedValue = mValue = aValue;
    }
  }

  void SetValueAtTime(float aValue, double aStartTime, ErrorResult& aRv)
  {
    InsertEvent(AudioTimelineEvent(AudioTimelineEvent::SetValue, aStartTime, aValue), aRv);
  }

  void LinearRampToValueAtTime(float aValue, double aEndTime, ErrorResult& aRv)
  {
    InsertEvent(AudioTimelineEvent(AudioTimelineEvent::LinearRamp, aEndTime, aValue), aRv);
  }

  void ExponentialRampToValueAtTime(float aValue, double aEndTime, ErrorResult& aRv)
  {
    InsertEvent(AudioTimelineEvent(AudioTimelineEvent::ExponentialRamp, aEndTime, aValue), aRv);
  }

  void SetTargetAtTime(float aTarget, double aStartTime, double aTimeConstant, ErrorResult& aRv)
  {
    InsertEvent(AudioTimelineEvent(AudioTimelineEvent::SetTarget, aStartTime, aTarget, aTimeConstant), aRv);
  }

  void SetValueCurveAtTime(const float* aValues, uint32_t aValuesLength, double aStartTime, double aDuration, ErrorResult& aRv)
  {
    InsertEvent(AudioTimelineEvent(AudioTimelineEvent::SetValueCurve, aStartTime, 0.0f, 0.0f, aDuration, aValues, aValuesLength), aRv);
  }

  void CancelScheduledValues(double aStartTime)
  {
    for (unsigned i = 0; i < mEvents.Length(); ++i) {
      if (mEvents[i].mTime >= aStartTime) {
#ifdef DEBUG
        // Sanity check: the array should be sorted, so all of the following
        // events should have a time greater than or equal to aStartTime too.
        for (unsigned j = i + 1; j < mEvents.Length(); ++j) {
          MOZ_ASSERT(mEvents[j].mTime >= aStartTime);
        }
#endif
        mEvents.TruncateLength(i);
        break;
      }
    }
  }

  void CancelAllEvents()
  {
    mEvents.Clear();
  }

  static bool TimesEqual(int64_t aLhs, int64_t aRhs)
  {
    return aLhs == aRhs;
  }

  // Since we are going to accumulate error by adding 0.01 multiple times in a
  // loop, we want to fuzz the equality check in GetValueAtTime.
  static bool TimesEqual(double aLhs, double aRhs)
  {
    const float kEpsilon = 0.0000000001f;
    return fabs(aLhs - aRhs) < kEpsilon;
  }

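  // Compute the value at aTime and remember it in mComputedValue, so that a
  // later SetTarget event can start its approach from the value that was
  // returned most recently.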
  template<class TimeType>
  float GetValueAtTime(TimeType aTime)
  {
    mComputedValue = GetValueAtTimeHelper(aTime);
    return mComputedValue;
  }

  // This method computes the AudioParam value at a given time based on the event timeline
  template<class TimeType>
  float GetValueAtTimeHelper(TimeType aTime)
  {
    const AudioTimelineEvent* previous = nullptr;
    const AudioTimelineEvent* next = nullptr;

    bool bailOut = false;
    for (unsigned i = 0; !bailOut && i < mEvents.Length(); ++i) {
      switch (mEvents[i].mType) {
      case AudioTimelineEvent::SetValue:
      case AudioTimelineEvent::SetTarget:
      case AudioTimelineEvent::LinearRamp:
      case AudioTimelineEvent::ExponentialRamp:
      case AudioTimelineEvent::SetValueCurve:
        if (TimesEqual(aTime, mEvents[i].template Time<TimeType>())) {
          mLastComputedValue = mComputedValue;
          // Find the last event with the same time
          do {
            ++i;
          } while (i < mEvents.Length() &&
                   aTime == mEvents[i].template Time<TimeType>());

          // SetTarget nodes can be handled no matter what their next node is (if they have one)
          if (mEvents[i - 1].mType == AudioTimelineEvent::SetTarget) {
            // Follow the curve, without regard to the next event, starting at
            // the last value of the last event.
            return ExponentialApproach(mEvents[i - 1].template Time<TimeType>(),
                                       mLastComputedValue, mEvents[i - 1].mValue,
                                       mEvents[i - 1].mTimeConstant, aTime);
          }

          // SetValueCurve events can be handled no matter what their next node is (if they have one)
          if (mEvents[i - 1].mType == AudioTimelineEvent::SetValueCurve) {
            return ExtractValueFromCurve(mEvents[i - 1].template Time<TimeType>(),
                                         mEvents[i - 1].mCurve,
                                         mEvents[i - 1].mCurveLength,
                                         mEvents[i - 1].mDuration, aTime);
          }

          // For other event types
          return mEvents[i - 1].mValue;
        }
        previous = next;
        next = &mEvents[i];
        if (aTime < mEvents[i].template Time<TimeType>()) {
          bailOut = true;
        }
        break;
      default:
        MOZ_ASSERT(false, "unreached");
      }
    }
    // Handle the case where the time is past all of the events
    if (!bailOut) {
      previous = next;
      next = nullptr;
    }

    // Just return the default value if we did not find anything
    if (!previous && !next) {
      return mValue;
    }

    // If the requested time is before all of the existing events
    if (!previous) {
      return mValue;
    }

    // SetTarget nodes can be handled no matter what their next node is (if they have one)
    if (previous->mType == AudioTimelineEvent::SetTarget) {
      return ExponentialApproach(previous->template Time<TimeType>(),
                                 mLastComputedValue, previous->mValue,
                                 previous->mTimeConstant, aTime);
    }

    // SetValueCurve events can be handled no matter what their next node is (if they have one)
    if (previous->mType == AudioTimelineEvent::SetValueCurve) {
      return ExtractValueFromCurve(previous->template Time<TimeType>(),
                                   previous->mCurve, previous->mCurveLength,
                                   previous->mDuration, aTime);
    }

    // If the requested time is after all of the existing events
    if (!next) {
      switch (previous->mType) {
      case AudioTimelineEvent::SetValue:
      case AudioTimelineEvent::LinearRamp:
      case AudioTimelineEvent::ExponentialRamp:
        // The value will be constant after the last event
        return previous->mValue;
      case AudioTimelineEvent::SetValueCurve:
        return ExtractValueFromCurve(previous->template Time<TimeType>(),
                                     previous->mCurve, previous->mCurveLength,
                                     previous->mDuration, aTime);
      case AudioTimelineEvent::SetTarget:
        MOZ_ASSERT(false, "unreached");
      }
      MOZ_ASSERT(false, "unreached");
    }

    // Finally, handle the case where we have both a previous and a next event

    // First, handle the case where our range ends up in a ramp event
    switch (next->mType) {
    case AudioTimelineEvent::LinearRamp:
      return LinearInterpolate(previous->template Time<TimeType>(), previous->mValue, next->template Time<TimeType>(), next->mValue, aTime);
    case AudioTimelineEvent::ExponentialRamp:
      return ExponentialInterpolate(previous->template Time<TimeType>(), previous->mValue, next->template Time<TimeType>(), next->mValue, aTime);
    case AudioTimelineEvent::SetValue:
    case AudioTimelineEvent::SetTarget:
    case AudioTimelineEvent::SetValueCurve:
      break;
    }

    // Now handle all other cases
    switch (previous->mType) {
    case AudioTimelineEvent::SetValue:
    case AudioTimelineEvent::LinearRamp:
    case AudioTimelineEvent::ExponentialRamp:
      // If the next event type is neither linear nor exponential ramp, the
      // value is constant.
      return previous->mValue;
    case AudioTimelineEvent::SetValueCurve:
      return ExtractValueFromCurve(previous->template Time<TimeType>(),
                                   previous->mCurve, previous->mCurveLength,
                                   previous->mDuration, aTime);
    case AudioTimelineEvent::SetTarget:
      MOZ_ASSERT(false, "unreached");
    }

    MOZ_ASSERT(false, "unreached");
    return 0.0f;
  }

  // Return the number of events scheduled
  uint32_t GetEventCount() const
  {
    return mEvents.Length();
  }

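  // Linear interpolation between (t0, v0) and (t1, v1), evaluated at t:
  // v(t) = v0 + (v1 - v0) * (t - t0) / (t1 - t0)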
  static float LinearInterpolate(double t0, float v0, double t1, float v1, double t)
  {
    return v0 + (v1 - v0) * ((t - t0) / (t1 - t0));
  }

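  // Exponential interpolation between (t0, v0) and (t1, v1), evaluated at t:
  // v(t) = v0 * (v1 / v0) ^ ((t - t0) / (t1 - t0))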
  static float ExponentialInterpolate(double t0, float v0, double t1, float v1, double t)
  {
    return v0 * powf(v1 / v0, (t - t0) / (t1 - t0));
  }

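  // First-order exponential approach from v0 towards the target v1, starting
  // at t0 with the given time constant:
  // v(t) = v1 + (v0 - v1) * exp(-(t - t0) / timeConstant)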
  static float ExponentialApproach(double t0, double v0, float v1, double timeConstant, double t)
  {
    return v1 + (v0 - v1) * expf(-(t - t0) / timeConstant);
  }

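  // Sample the curve of a SetValueCurve event that starts at startTime and
  // lasts for duration; once the curve has finished, the last sample is
  // returned.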
  static float ExtractValueFromCurve(double startTime, float* aCurve, uint32_t aCurveLength, double duration, double t)
  {
    if (t >= startTime + duration) {
      // After the duration, return the last curve value
      return aCurve[aCurveLength - 1];
    }
    double ratio = std::max((t - startTime) / duration, 0.0);
    if (ratio >= 1.0) {
      return aCurve[aCurveLength - 1];
    }
    return aCurve[uint32_t(aCurveLength * ratio)];
  }

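  // Convert every event time from its absolute value to ticks using
  // aConvertor, and scale the time constants and durations by the sample rate
  // so that they are expressed in ticks as well.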
  void ConvertEventTimesToTicks(int64_t (*aConvertor)(double aTime, void* aClosure), void* aClosure,
                                int32_t aSampleRate)
  {
    for (unsigned i = 0; i < mEvents.Length(); ++i) {
      mEvents[i].SetTimeInTicks(aConvertor(mEvents[i].template Time<double>(), aClosure));
      mEvents[i].mTimeConstant *= aSampleRate;
      mEvents[i].mDuration *= aSampleRate;
    }
  }

private:
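  // Return the most recent event whose time is at or before aTime, or nullptr
  // if aTime precedes every scheduled event.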
  const AudioTimelineEvent* GetPreviousEvent(double aTime) const
  {
    const AudioTimelineEvent* previous = nullptr;
    const AudioTimelineEvent* next = nullptr;

    bool bailOut = false;
    for (unsigned i = 0; !bailOut && i < mEvents.Length(); ++i) {
      switch (mEvents[i].mType) {
      case AudioTimelineEvent::SetValue:
      case AudioTimelineEvent::SetTarget:
      case AudioTimelineEvent::LinearRamp:
      case AudioTimelineEvent::ExponentialRamp:
      case AudioTimelineEvent::SetValueCurve:
        if (aTime == mEvents[i].mTime) {
          // Find the last event with the same time
          do {
            ++i;
          } while (i < mEvents.Length() &&
                   aTime == mEvents[i].mTime);
          return &mEvents[i - 1];
        }
        previous = next;
        next = &mEvents[i];
        if (aTime < mEvents[i].mTime) {
          bailOut = true;
        }
        break;
      default:
        MOZ_ASSERT(false, "unreached");
      }
    }
    // Handle the case where the time is past all of the events
    if (!bailOut) {
      previous = next;
    }

    return previous;
  }

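  // Validate aEvent and insert it into mEvents in time order, replacing an
  // existing event with the same time and type. aRv is set to
  // NS_ERROR_DOM_SYNTAX_ERR when the event contains non-finite values, when it
  // overlaps a SetValueCurve event, or when a non-positive value would be used
  // for an ExponentialRamp.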
  void InsertEvent(const AudioTimelineEvent& aEvent, ErrorResult& aRv)
  {
    if (!aEvent.IsValid()) {
      aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
      return;
    }

    // Make sure that non-curve events don't fall within the duration of a
    // curve event.
    for (unsigned i = 0; i < mEvents.Length(); ++i) {
      if (mEvents[i].mType == AudioTimelineEvent::SetValueCurve &&
          mEvents[i].mTime <= aEvent.mTime &&
          (mEvents[i].mTime + mEvents[i].mDuration) >= aEvent.mTime) {
        aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
        return;
      }
    }

    // Make sure that curve events don't fall in a range which includes other
    // events.
    if (aEvent.mType == AudioTimelineEvent::SetValueCurve) {
      for (unsigned i = 0; i < mEvents.Length(); ++i) {
        if (mEvents[i].mTime > aEvent.mTime &&
            mEvents[i].mTime < (aEvent.mTime + aEvent.mDuration)) {
          aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
          return;
        }
      }
    }

    // Make sure that invalid values are not used for exponential curves
    if (aEvent.mType == AudioTimelineEvent::ExponentialRamp) {
      if (aEvent.mValue <= 0.f) {
        aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
        return;
      }
      const AudioTimelineEvent* previousEvent = GetPreviousEvent(aEvent.mTime);
      if (previousEvent) {
        if (previousEvent->mValue <= 0.f) {
          aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
          return;
        }
      } else {
        if (mValue <= 0.f) {
          aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
          return;
        }
      }
    }

    for (unsigned i = 0; i < mEvents.Length(); ++i) {
      if (aEvent.mTime == mEvents[i].mTime) {
        if (aEvent.mType == mEvents[i].mType) {
          // If times and types are equal, replace the event
          mEvents.ReplaceElementAt(i, aEvent);
        } else {
          // Otherwise, place the element after the last event of another type
          do {
            ++i;
          } while (i < mEvents.Length() &&
                   aEvent.mType != mEvents[i].mType &&
                   aEvent.mTime == mEvents[i].mTime);
          mEvents.InsertElementAt(i, aEvent);
        }
        return;
      }
      // Otherwise, place the event right after the latest existing event
      if (aEvent.mTime < mEvents[i].mTime) {
        mEvents.InsertElementAt(i, aEvent);
        return;
      }
    }

    // If we couldn't find a place for the event, just append it to the list
    mEvents.AppendElement(aEvent);
  }

private:
  // This is a sorted array of the events in the timeline. Queries of this
  // data structure should probably be more frequent than modifications to it,
  // and that is the reason why we're using a simple array as the data structure.
  // We can optimize this in the future if the performance of the array ends up
  // being a bottleneck.
  nsTArray<AudioTimelineEvent> mEvents;
  float mValue;
  // This is the value of this AudioParam we computed at the last call.
  float mComputedValue;
  // This is the value of this AudioParam at the last tick of the previous event.
  float mLastComputedValue;
};

}
}

#endif