Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double key logic according to:
private browsing mode or privacy.thirdparty.isolate preference and
implement in GetCookieStringCommon and FindCookie where it counts...
There are some reservations about how to convince FindCookie callers to
test the condition and to pass a nullptr when double-key logic is disabled.
michael@0 | 1 | /* |
michael@0 | 2 | * Copyright 2013 Google Inc. |
michael@0 | 3 | * |
michael@0 | 4 | * Use of this source code is governed by a BSD-style license that can be |
michael@0 | 5 | * found in the LICENSE file. |
michael@0 | 6 | */ |
michael@0 | 7 | |
michael@0 | 8 | #ifndef SkAtomics_android_DEFINED |
michael@0 | 9 | #define SkAtomics_android_DEFINED |
michael@0 | 10 | |
michael@0 | 11 | /** Android framework atomics. */ |
michael@0 | 12 | |
michael@0 | 13 | #include <cutils/atomic.h> |
michael@0 | 14 | #include <stdint.h> |
michael@0 | 15 | |
/** Atomically increment the int32 at addr.
 *  Forwards the result of android_atomic_inc (the pre-increment value,
 *  per the cutils atomics contract).
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_inc(int32_t* addr) {
    int32_t prior = android_atomic_inc(addr);
    return prior;
}
michael@0 | 19 | |
/** Atomically add inc to the int32 at addr.
 *  Note the argument order swap: android_atomic_add takes (increment, addr)
 *  while the Skia API takes (addr, increment).
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_add(int32_t* addr, int32_t inc) {
    int32_t result = android_atomic_add(inc, addr);
    return result;
}
michael@0 | 23 | |
/** Atomically decrement the int32 at addr.
 *  Forwards the result of android_atomic_dec (the pre-decrement value,
 *  per the cutils atomics contract).
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_dec(int32_t* addr) {
    int32_t prior = android_atomic_dec(addr);
    return prior;
}
michael@0 | 27 | |
/** Acquire barrier meant to follow sk_atomic_dec.
 *  Deliberately a no-op: on Android the android_atomic_* operations above
 *  already issue full memory barriers, so no extra fence is needed here.
 *  Should the platform ever move to weaker barriers, restore:
 *      int dummy;
 *      android_atomic_acquire_store(0, &dummy);
 */
static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_dec() { }
michael@0 | 34 | |
/** Atomically increment the int32 at addr only if it is non-zero.
 *  Returns the value observed before the increment; returns 0 (without
 *  modifying *addr) if the value was already 0. Retries the CAS until it
 *  either succeeds or observes a zero.
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_conditional_inc(int32_t* addr) {
    for (;;) {
        int32_t prev = *addr;
        if (0 == prev) {
            return 0;
        }
        // android_atomic_release_cas reports success with a 0 return code.
        if (android_atomic_release_cas(prev, prev + 1, addr) == 0) {
            return prev;
        }
    }
}
michael@0 | 46 | |
/** Compare-and-swap: if *addr == before, store after into *addr.
 *  Returns true when the swap happened, false otherwise.
 */
static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr,
                                                                int32_t before,
                                                                int32_t after) {
    // A 0 return code from android_atomic_release_cas means the swap took place.
    return 0 == android_atomic_release_cas(before, after, addr);
}
michael@0 | 53 | |
/** Acquire barrier meant to follow sk_atomic_conditional_inc.
 *  Deliberately a no-op: on Android the android_atomic_* operations above
 *  already issue full memory barriers, so no extra fence is needed here.
 *  Should the platform ever move to weaker barriers, restore:
 *      int dummy;
 *      android_atomic_acquire_store(0, &dummy);
 */
static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_conditional_inc() { }
michael@0 | 60 | |
michael@0 | 61 | #endif |