ipc/chromium/src/base/atomicops_internals_x86_macosx.h

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset    0:6474c204b198
permissions  -rw-r--r--

Cloned from the upstream tor-browser origin at tag tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

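// Illustrative usage sketch (not part of the original header): callers are
// expected to include base/atomicops.h, which pulls in this implementation on
// x86 Mac OS X. Roughly:
//
//   base::subtle::Atomic32 counter = 0;
//   base::subtle::Barrier_AtomicIncrement(&counter, 1);   // counter becomes 1
//   base::subtle::Atomic32 seen = base::subtle::Acquire_Load(&counter);
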
#ifndef BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
#define BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {

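// Compare-and-swap without memory-barrier semantics. Returns old_value if the
// swap succeeded, otherwise the value observed at *ptr. If the swap fails but
// the re-read value still equals old_value (another thread raced it back),
// the loop retries rather than falsely signalling success.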
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

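// Atomically stores new_value into *ptr and returns the value that was there
// previously. Implemented as a CAS loop that retries until the swap installs
// new_value without interference from other threads.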
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}

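// Atomically adds 'increment' to *ptr. OSAtomicAdd32 and OSAtomicAdd32Barrier
// return the new (post-increment) value; the Barrier variant additionally
// acts as a full memory barrier.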
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}

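// Full hardware memory barrier: OSMemoryBarrier orders both loads and stores
// issued before the call against those issued after it.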
inline void MemoryBarrier() {
  OSMemoryBarrier();
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

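// As with the 64-bit variant below, the libkern interface does not
// distinguish between acquire and release barriers, so Release_CompareAndSwap
// simply forwards to Acquire_CompareAndSwap.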
inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

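// Plain loads and stores of aligned 32-bit values are already atomic on x86.
// The Acquire_/Release_ variants below add an explicit MemoryBarrier() on the
// side of the access that needs ordering: after the store for Acquire_Store,
// before it for Release_Store, and symmetrically for the loads.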
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}

#ifdef __LP64__

// 64-bit implementation on 64-bit platform

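// These mirror the 32-bit operations above, using the OSAtomic*64 primitives;
// they are only compiled when __LP64__ is defined.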
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 const_cast<Atomic64*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     const_cast<Atomic64*>(ptr)));
  return old_value;
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, const_cast<Atomic64*>(ptr));
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment, const_cast<Atomic64*>(ptr));
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(old_value, new_value,
                                        const_cast<Atomic64*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}

#endif  // defined(__LP64__)

// MacOS uses long for intptr_t, so AtomicWord and Atomic32 are always
// distinct types on the Mac, even when they are the same size. We need to
// explicitly cast from AtomicWord to Atomic32/64 to implement the AtomicWord
// interface.
#ifdef __LP64__
#define AtomicWordCastType Atomic64
#else
#define AtomicWordCastType Atomic32
#endif
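// AtomicWord operations: each wrapper reinterpret_casts the AtomicWord
// pointer to the word-sized atomic type chosen above and forwards to the
// corresponding Atomic32/Atomic64 implementation.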

inline AtomicWord NoBarrier_CompareAndSwap(volatile AtomicWord* ptr,
                                           AtomicWord old_value,
                                           AtomicWord new_value) {
  return NoBarrier_CompareAndSwap(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr),
      old_value, new_value);
}

inline AtomicWord NoBarrier_AtomicExchange(volatile AtomicWord* ptr,
                                           AtomicWord new_value) {
  return NoBarrier_AtomicExchange(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr), new_value);
}

inline AtomicWord NoBarrier_AtomicIncrement(volatile AtomicWord* ptr,
                                            AtomicWord increment) {
  return NoBarrier_AtomicIncrement(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment);
}

inline AtomicWord Barrier_AtomicIncrement(volatile AtomicWord* ptr,
                                          AtomicWord increment) {
  return Barrier_AtomicIncrement(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment);
}

inline AtomicWord Acquire_CompareAndSwap(volatile AtomicWord* ptr,
                                         AtomicWord old_value,
                                         AtomicWord new_value) {
  return base::subtle::Acquire_CompareAndSwap(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr),
      old_value, new_value);
}

inline AtomicWord Release_CompareAndSwap(volatile AtomicWord* ptr,
                                         AtomicWord old_value,
                                         AtomicWord new_value) {
  return base::subtle::Release_CompareAndSwap(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr),
      old_value, new_value);
}

inline void NoBarrier_Store(volatile AtomicWord *ptr, AtomicWord value) {
  NoBarrier_Store(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
}

inline void Acquire_Store(volatile AtomicWord* ptr, AtomicWord value) {
  return base::subtle::Acquire_Store(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
}

inline void Release_Store(volatile AtomicWord* ptr, AtomicWord value) {
  return base::subtle::Release_Store(
      reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
}

inline AtomicWord NoBarrier_Load(volatile const AtomicWord *ptr) {
  return NoBarrier_Load(
      reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
}

inline AtomicWord Acquire_Load(volatile const AtomicWord* ptr) {
  return base::subtle::Acquire_Load(
      reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
}

inline AtomicWord Release_Load(volatile const AtomicWord* ptr) {
  return base::subtle::Release_Load(
      reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
}

#undef AtomicWordCastType

}  // namespace base::subtle
}  // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
