Wed, 31 Dec 2014 06:09:35 +0100
Cloned from upstream origin tor-browser at tag tor-browser-31.3.0esr-4.5-1-build1
(revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f) for hacking purposes.
michael@0 | 1 | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
michael@0 | 2 | * vim: set ts=8 sts=4 et sw=4 tw=99: |
michael@0 | 3 | * This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 5 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #include "jit/IonMacroAssembler.h" |
michael@0 | 8 | |
michael@0 | 9 | #include "jsinfer.h" |
michael@0 | 10 | #include "jsprf.h" |
michael@0 | 11 | |
michael@0 | 12 | #include "builtin/TypedObject.h" |
michael@0 | 13 | #include "jit/Bailouts.h" |
michael@0 | 14 | #include "jit/BaselineFrame.h" |
michael@0 | 15 | #include "jit/BaselineIC.h" |
michael@0 | 16 | #include "jit/BaselineJIT.h" |
michael@0 | 17 | #include "jit/Lowering.h" |
michael@0 | 18 | #include "jit/MIR.h" |
michael@0 | 19 | #include "jit/ParallelFunctions.h" |
michael@0 | 20 | #include "vm/ForkJoin.h" |
michael@0 | 21 | #include "vm/TraceLogging.h" |
michael@0 | 22 | |
michael@0 | 23 | #ifdef JSGC_GENERATIONAL |
michael@0 | 24 | # include "jsgcinlines.h" |
michael@0 | 25 | #endif |
michael@0 | 26 | #include "jsinferinlines.h" |
michael@0 | 27 | #include "jsobjinlines.h" |
michael@0 | 28 | |
michael@0 | 29 | using namespace js; |
michael@0 | 30 | using namespace js::jit; |
michael@0 | 31 | |
michael@0 | 32 | using JS::GenericNaN; |
michael@0 | 33 | |
michael@0 | 34 | namespace { |
michael@0 | 35 | |
michael@0 | 36 | // Emulate a TypeSet logic from a Type object to avoid duplicating the guard |
michael@0 | 37 | // logic. |
michael@0 | 38 | class TypeWrapper { |
michael@0 | 39 | types::Type t_; |
michael@0 | 40 | |
michael@0 | 41 | public: |
michael@0 | 42 | TypeWrapper(types::Type t) : t_(t) {} |
michael@0 | 43 | |
michael@0 | 44 | inline bool unknown() const { |
michael@0 | 45 | return t_.isUnknown(); |
michael@0 | 46 | } |
michael@0 | 47 | inline bool hasType(types::Type t) const { |
michael@0 | 48 | if (t == types::Type::Int32Type()) |
michael@0 | 49 | return t == t_ || t_ == types::Type::DoubleType(); |
michael@0 | 50 | return t == t_; |
michael@0 | 51 | } |
michael@0 | 52 | inline unsigned getObjectCount() const { |
michael@0 | 53 | if (t_.isAnyObject() || t_.isUnknown() || !t_.isObject()) |
michael@0 | 54 | return 0; |
michael@0 | 55 | return 1; |
michael@0 | 56 | } |
michael@0 | 57 | inline JSObject *getSingleObject(unsigned) const { |
michael@0 | 58 | if (t_.isSingleObject()) |
michael@0 | 59 | return t_.singleObject(); |
michael@0 | 60 | return nullptr; |
michael@0 | 61 | } |
michael@0 | 62 | inline types::TypeObject *getTypeObject(unsigned) const { |
michael@0 | 63 | if (t_.isTypeObject()) |
michael@0 | 64 | return t_.typeObject(); |
michael@0 | 65 | return nullptr; |
michael@0 | 66 | } |
michael@0 | 67 | }; |
michael@0 | 68 | |
michael@0 | 69 | } /* anonymous namespace */ |
michael@0 | 70 | |
// Emit a guard that the Value at |address| has a type contained in |types|,
// jumping to |miss| on failure. |scratch| is needed only when specific
// objects must be tested (it then holds the extracted object pointer).
//
// Branch emission is deliberately delayed by one iteration: each branch is
// kept in |lastBranch| and only emitted when a successor exists, so that the
// final branch can instead be inverted and retargeted at |miss|, saving an
// unconditional jump on the fall-through (matched) path.
template <typename Source, typename TypeSet> void
MacroAssembler::guardTypeSet(const Source &address, const TypeSet *types,
                             Register scratch, Label *miss)
{
    JS_ASSERT(!types->unknown());

    Label matched;
    // Every primitive tag the guard may need to test, in emission order.
    types::Type tests[7] = {
        types::Type::Int32Type(),
        types::Type::UndefinedType(),
        types::Type::BooleanType(),
        types::Type::StringType(),
        types::Type::NullType(),
        types::Type::MagicArgType(),
        types::Type::AnyObjectType()
    };

    // The double type also implies Int32.
    // So replace the int32 test with the double one.
    if (types->hasType(types::Type::DoubleType())) {
        JS_ASSERT(types->hasType(types::Type::Int32Type()));
        tests[0] = types::Type::DoubleType();
    }

    Register tag = extractTag(address, scratch);

    // Emit all typed tests.
    BranchType lastBranch;
    for (size_t i = 0; i < 7; i++) {
        if (!types->hasType(tests[i]))
            continue;

        // Emit the previous pending branch; keep the current one pending so
        // the final branch can later be inverted (see header comment).
        if (lastBranch.isInitialized())
            lastBranch.emit(*this);
        lastBranch = BranchType(Equal, tag, tests[i], &matched);
    }

    // If this is the last check, invert the last branch.
    if (types->hasType(types::Type::AnyObjectType()) || !types->getObjectCount()) {
        // No specific-object tests will follow, so the pending branch is the
        // final one: invert it and send the failing path to |miss|.
        if (!lastBranch.isInitialized()) {
            // No type matched at all: everything misses.
            jump(miss);
            return;
        }

        lastBranch.invertCondition();
        lastBranch.relink(miss);
        lastBranch.emit(*this);

        bind(&matched);
        return;
    }

    // Specific-object tests follow; flush the pending branch normally.
    if (lastBranch.isInitialized())
        lastBranch.emit(*this);

    // Test specific objects.
    JS_ASSERT(scratch != InvalidReg);
    branchTestObject(NotEqual, tag, miss);
    Register obj = extractObject(address, scratch);
    guardObjectType(obj, types, scratch, miss);

    bind(&matched);
}
michael@0 | 134 | |
michael@0 | 135 | template <typename TypeSet> void |
michael@0 | 136 | MacroAssembler::guardObjectType(Register obj, const TypeSet *types, |
michael@0 | 137 | Register scratch, Label *miss) |
michael@0 | 138 | { |
michael@0 | 139 | JS_ASSERT(!types->unknown()); |
michael@0 | 140 | JS_ASSERT(!types->hasType(types::Type::AnyObjectType())); |
michael@0 | 141 | JS_ASSERT(types->getObjectCount()); |
michael@0 | 142 | JS_ASSERT(scratch != InvalidReg); |
michael@0 | 143 | |
michael@0 | 144 | Label matched; |
michael@0 | 145 | |
michael@0 | 146 | BranchGCPtr lastBranch; |
michael@0 | 147 | JS_ASSERT(!lastBranch.isInitialized()); |
michael@0 | 148 | bool hasTypeObjects = false; |
michael@0 | 149 | unsigned count = types->getObjectCount(); |
michael@0 | 150 | for (unsigned i = 0; i < count; i++) { |
michael@0 | 151 | if (!types->getSingleObject(i)) { |
michael@0 | 152 | hasTypeObjects = hasTypeObjects || types->getTypeObject(i); |
michael@0 | 153 | continue; |
michael@0 | 154 | } |
michael@0 | 155 | |
michael@0 | 156 | if (lastBranch.isInitialized()) |
michael@0 | 157 | lastBranch.emit(*this); |
michael@0 | 158 | |
michael@0 | 159 | JSObject *object = types->getSingleObject(i); |
michael@0 | 160 | lastBranch = BranchGCPtr(Equal, obj, ImmGCPtr(object), &matched); |
michael@0 | 161 | } |
michael@0 | 162 | |
michael@0 | 163 | if (hasTypeObjects) { |
michael@0 | 164 | // We are possibly going to overwrite the obj register. So already |
michael@0 | 165 | // emit the branch, since branch depends on previous value of obj |
michael@0 | 166 | // register and there is definitely a branch following. So no need |
michael@0 | 167 | // to invert the condition. |
michael@0 | 168 | if (lastBranch.isInitialized()) |
michael@0 | 169 | lastBranch.emit(*this); |
michael@0 | 170 | lastBranch = BranchGCPtr(); |
michael@0 | 171 | |
michael@0 | 172 | // Note: Some platforms give the same register for obj and scratch. |
michael@0 | 173 | // Make sure when writing to scratch, the obj register isn't used anymore! |
michael@0 | 174 | loadPtr(Address(obj, JSObject::offsetOfType()), scratch); |
michael@0 | 175 | |
michael@0 | 176 | for (unsigned i = 0; i < count; i++) { |
michael@0 | 177 | if (!types->getTypeObject(i)) |
michael@0 | 178 | continue; |
michael@0 | 179 | |
michael@0 | 180 | if (lastBranch.isInitialized()) |
michael@0 | 181 | lastBranch.emit(*this); |
michael@0 | 182 | |
michael@0 | 183 | types::TypeObject *object = types->getTypeObject(i); |
michael@0 | 184 | lastBranch = BranchGCPtr(Equal, scratch, ImmGCPtr(object), &matched); |
michael@0 | 185 | } |
michael@0 | 186 | } |
michael@0 | 187 | |
michael@0 | 188 | if (!lastBranch.isInitialized()) { |
michael@0 | 189 | jump(miss); |
michael@0 | 190 | return; |
michael@0 | 191 | } |
michael@0 | 192 | |
michael@0 | 193 | lastBranch.invertCondition(); |
michael@0 | 194 | lastBranch.relink(miss); |
michael@0 | 195 | lastBranch.emit(*this); |
michael@0 | 196 | |
michael@0 | 197 | bind(&matched); |
michael@0 | 198 | return; |
michael@0 | 199 | } |
michael@0 | 200 | |
michael@0 | 201 | template <typename Source> void |
michael@0 | 202 | MacroAssembler::guardType(const Source &address, types::Type type, |
michael@0 | 203 | Register scratch, Label *miss) |
michael@0 | 204 | { |
michael@0 | 205 | TypeWrapper wrapper(type); |
michael@0 | 206 | guardTypeSet(address, &wrapper, scratch, miss); |
michael@0 | 207 | } |
michael@0 | 208 | |
// Explicit template instantiations: the Source (Address / ValueOperand /
// TypedOrValueRegister) and TypeSet (TemporaryTypeSet / HeapTypeSet /
// TypeSet / TypeWrapper) combinations required by callers elsewhere in the
// engine, since the definitions above live in this translation unit.
template void MacroAssembler::guardTypeSet(const Address &address, const types::TemporaryTypeSet *types,
                                           Register scratch, Label *miss);
template void MacroAssembler::guardTypeSet(const ValueOperand &value, const types::TemporaryTypeSet *types,
                                           Register scratch, Label *miss);

template void MacroAssembler::guardTypeSet(const Address &address, const types::HeapTypeSet *types,
                                           Register scratch, Label *miss);
template void MacroAssembler::guardTypeSet(const ValueOperand &value, const types::HeapTypeSet *types,
                                           Register scratch, Label *miss);
template void MacroAssembler::guardTypeSet(const TypedOrValueRegister &reg, const types::HeapTypeSet *types,
                                           Register scratch, Label *miss);

template void MacroAssembler::guardTypeSet(const Address &address, const types::TypeSet *types,
                                           Register scratch, Label *miss);
template void MacroAssembler::guardTypeSet(const ValueOperand &value, const types::TypeSet *types,
                                           Register scratch, Label *miss);

template void MacroAssembler::guardTypeSet(const Address &address, const TypeWrapper *types,
                                           Register scratch, Label *miss);
template void MacroAssembler::guardTypeSet(const ValueOperand &value, const TypeWrapper *types,
                                           Register scratch, Label *miss);

template void MacroAssembler::guardObjectType(Register obj, const types::TemporaryTypeSet *types,
                                              Register scratch, Label *miss);
template void MacroAssembler::guardObjectType(Register obj, const types::TypeSet *types,
                                              Register scratch, Label *miss);
template void MacroAssembler::guardObjectType(Register obj, const TypeWrapper *types,
                                              Register scratch, Label *miss);

template void MacroAssembler::guardType(const Address &address, types::Type type,
                                        Register scratch, Label *miss);
template void MacroAssembler::guardType(const ValueOperand &value, types::Type type,
                                        Register scratch, Label *miss);
michael@0 | 242 | |
// Branch to |label| when comparing |ptr1| against immediate |ptr2| under
// |cond|. If |ptr2| points into the GC nursery, flag this assembler as
// embedding nursery pointers so the generated code can be patched/handled
// appropriately when the nursery moves.
void
MacroAssembler::branchNurseryPtr(Condition cond, const Address &ptr1, const ImmMaybeNurseryPtr &ptr2,
                                 Label *label)
{
#ifdef JSGC_GENERATIONAL
    // Only generational-GC builds have a nursery to check against.
    if (ptr2.value && gc::IsInsideNursery(GetIonContext()->cx->runtime(), (void *)ptr2.value))
        embedsNurseryPointers_ = true;
#endif
    branchPtr(cond, ptr1, ptr2, label);
}
michael@0 | 253 | |
// Move immediate |ptr| into |reg|, recording (as in branchNurseryPtr) when
// the immediate points into the GC nursery so the embedded pointer can be
// accounted for at GC time.
void
MacroAssembler::moveNurseryPtr(const ImmMaybeNurseryPtr &ptr, Register reg)
{
#ifdef JSGC_GENERATIONAL
    // Only generational-GC builds have a nursery to check against.
    if (ptr.value && gc::IsInsideNursery(GetIonContext()->cx->runtime(), (void *)ptr.value))
        embedsNurseryPointers_ = true;
#endif
    movePtr(ptr, reg);
}
michael@0 | 263 | |
michael@0 | 264 | template<typename S, typename T> |
michael@0 | 265 | static void |
michael@0 | 266 | StoreToTypedFloatArray(MacroAssembler &masm, int arrayType, const S &value, const T &dest) |
michael@0 | 267 | { |
michael@0 | 268 | switch (arrayType) { |
michael@0 | 269 | case ScalarTypeDescr::TYPE_FLOAT32: |
michael@0 | 270 | if (LIRGenerator::allowFloat32Optimizations()) { |
michael@0 | 271 | masm.storeFloat32(value, dest); |
michael@0 | 272 | } else { |
michael@0 | 273 | #ifdef JS_MORE_DETERMINISTIC |
michael@0 | 274 | // See the comment in TypedArrayObjectTemplate::doubleToNative. |
michael@0 | 275 | masm.canonicalizeDouble(value); |
michael@0 | 276 | #endif |
michael@0 | 277 | masm.convertDoubleToFloat32(value, ScratchFloatReg); |
michael@0 | 278 | masm.storeFloat32(ScratchFloatReg, dest); |
michael@0 | 279 | } |
michael@0 | 280 | break; |
michael@0 | 281 | case ScalarTypeDescr::TYPE_FLOAT64: |
michael@0 | 282 | #ifdef JS_MORE_DETERMINISTIC |
michael@0 | 283 | // See the comment in TypedArrayObjectTemplate::doubleToNative. |
michael@0 | 284 | masm.canonicalizeDouble(value); |
michael@0 | 285 | #endif |
michael@0 | 286 | masm.storeDouble(value, dest); |
michael@0 | 287 | break; |
michael@0 | 288 | default: |
michael@0 | 289 | MOZ_ASSUME_UNREACHABLE("Invalid typed array type"); |
michael@0 | 290 | } |
michael@0 | 291 | } |
michael@0 | 292 | |
// Store |value| into a float typed-array element addressed by base+index;
// forwards to the shared StoreToTypedFloatArray() helper above.
void
MacroAssembler::storeToTypedFloatArray(int arrayType, const FloatRegister &value,
                                       const BaseIndex &dest)
{
    StoreToTypedFloatArray(*this, arrayType, value, dest);
}
// Store |value| into a float typed-array element at a fixed address;
// forwards to the shared StoreToTypedFloatArray() helper above.
void
MacroAssembler::storeToTypedFloatArray(int arrayType, const FloatRegister &value,
                                       const Address &dest)
{
    StoreToTypedFloatArray(*this, arrayType, value, dest);
}
michael@0 | 305 | |
// Load a typed-array element of kind |arrayType| from |src| into |dest|,
// sign/zero-extending or converting as required by the element type.
// |temp| is only used for the Uint32->double conversion; |fail| is taken
// when a Uint32 value does not fit in a signed int32 and |dest| is a GPR.
template<typename T>
void
MacroAssembler::loadFromTypedArray(int arrayType, const T &src, AnyRegister dest, Register temp,
                                   Label *fail)
{
    switch (arrayType) {
      case ScalarTypeDescr::TYPE_INT8:
        load8SignExtend(src, dest.gpr());
        break;
      case ScalarTypeDescr::TYPE_UINT8:
      case ScalarTypeDescr::TYPE_UINT8_CLAMPED:
        load8ZeroExtend(src, dest.gpr());
        break;
      case ScalarTypeDescr::TYPE_INT16:
        load16SignExtend(src, dest.gpr());
        break;
      case ScalarTypeDescr::TYPE_UINT16:
        load16ZeroExtend(src, dest.gpr());
        break;
      case ScalarTypeDescr::TYPE_INT32:
        load32(src, dest.gpr());
        break;
      case ScalarTypeDescr::TYPE_UINT32:
        if (dest.isFloat()) {
            // A double can represent any uint32 exactly; no failure path.
            load32(src, temp);
            convertUInt32ToDouble(temp, dest.fpu());
        } else {
            load32(src, dest.gpr());

            // Bail out if the value doesn't fit into a signed int32 value. This
            // is what allows MLoadTypedArrayElement to have a type() of
            // MIRType_Int32 for UInt32 array loads.
            branchTest32(Assembler::Signed, dest.gpr(), dest.gpr(), fail);
        }
        break;
      case ScalarTypeDescr::TYPE_FLOAT32:
        if (LIRGenerator::allowFloat32Optimizations()) {
            loadFloat32(src, dest.fpu());
            canonicalizeFloat(dest.fpu());
        } else {
            // Without float32 support, widen to double on load.
            loadFloatAsDouble(src, dest.fpu());
            canonicalizeDouble(dest.fpu());
        }
        break;
      case ScalarTypeDescr::TYPE_FLOAT64:
        loadDouble(src, dest.fpu());
        canonicalizeDouble(dest.fpu());
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Invalid typed array type");
    }
}
michael@0 | 358 | |
// Explicit instantiations of the AnyRegister overload for both addressing
// modes used by callers.
template void MacroAssembler::loadFromTypedArray(int arrayType, const Address &src, AnyRegister dest,
                                                 Register temp, Label *fail);
template void MacroAssembler::loadFromTypedArray(int arrayType, const BaseIndex &src, AnyRegister dest,
                                                 Register temp, Label *fail);
michael@0 | 363 | |
// Load a typed-array element of kind |arrayType| from |src| and box it into
// the Value register pair |dest|. Integer kinds are tagged as int32; float
// kinds are boxed as doubles. For Uint32, values that do not fit in a signed
// int32 are either boxed as doubles (|allowDouble|) or bail to |fail|.
// |temp| is used so |dest| is not clobbered on a failing path.
template<typename T>
void
MacroAssembler::loadFromTypedArray(int arrayType, const T &src, const ValueOperand &dest,
                                   bool allowDouble, Register temp, Label *fail)
{
    switch (arrayType) {
      case ScalarTypeDescr::TYPE_INT8:
      case ScalarTypeDescr::TYPE_UINT8:
      case ScalarTypeDescr::TYPE_UINT8_CLAMPED:
      case ScalarTypeDescr::TYPE_INT16:
      case ScalarTypeDescr::TYPE_UINT16:
      case ScalarTypeDescr::TYPE_INT32:
        // These kinds always fit in an int32; no failure path needed
        // (hence InvalidReg / nullptr for temp and fail).
        loadFromTypedArray(arrayType, src, AnyRegister(dest.scratchReg()), InvalidReg, nullptr);
        tagValue(JSVAL_TYPE_INT32, dest.scratchReg(), dest);
        break;
      case ScalarTypeDescr::TYPE_UINT32:
        // Don't clobber dest when we could fail, instead use temp.
        load32(src, temp);
        if (allowDouble) {
            // If the value fits in an int32, store an int32 type tag.
            // Else, convert the value to double and box it.
            Label done, isDouble;
            branchTest32(Assembler::Signed, temp, temp, &isDouble);
            {
                tagValue(JSVAL_TYPE_INT32, temp, dest);
                jump(&done);
            }
            bind(&isDouble);
            {
                convertUInt32ToDouble(temp, ScratchFloatReg);
                boxDouble(ScratchFloatReg, dest);
            }
            bind(&done);
        } else {
            // Bailout if the value does not fit in an int32.
            branchTest32(Assembler::Signed, temp, temp, fail);
            tagValue(JSVAL_TYPE_INT32, temp, dest);
        }
        break;
      case ScalarTypeDescr::TYPE_FLOAT32:
        loadFromTypedArray(arrayType, src, AnyRegister(ScratchFloatReg), dest.scratchReg(),
                           nullptr);
        // When float32 optimizations are on, the load above left a float32;
        // widen it before boxing as a double Value.
        if (LIRGenerator::allowFloat32Optimizations())
            convertFloat32ToDouble(ScratchFloatReg, ScratchFloatReg);
        boxDouble(ScratchFloatReg, dest);
        break;
      case ScalarTypeDescr::TYPE_FLOAT64:
        loadFromTypedArray(arrayType, src, AnyRegister(ScratchFloatReg), dest.scratchReg(),
                           nullptr);
        boxDouble(ScratchFloatReg, dest);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Invalid typed array type");
    }
}
michael@0 | 419 | |
// Explicit instantiations of the ValueOperand overload for both addressing
// modes used by callers.
template void MacroAssembler::loadFromTypedArray(int arrayType, const Address &src, const ValueOperand &dest,
                                                 bool allowDouble, Register temp, Label *fail);
template void MacroAssembler::loadFromTypedArray(int arrayType, const BaseIndex &src, const ValueOperand &dest,
                                                 bool allowDouble, Register temp, Label *fail);
michael@0 | 424 | |
// Emit the inline fast path for allocating a GC thing of |allocKind|,
// leaving the new cell pointer in |result| and clobbering |temp|. Jumps to
// |fail| whenever the slow (VM) path must run: GC zeal active, an object
// metadata callback installed, or the nursery/free-list exhausted.
void
MacroAssembler::newGCThing(Register result, Register temp, gc::AllocKind allocKind, Label *fail,
                           gc::InitialHeap initialHeap /* = gc::DefaultHeap */)
{
    // Inlined equivalent of js::gc::NewGCThing() without failure case handling.

    int thingSize = int(gc::Arena::thingSize(allocKind));

#ifdef JS_GC_ZEAL
    // Don't execute the inline path if gcZeal is active.
    branch32(Assembler::NotEqual,
             AbsoluteAddress(GetIonContext()->runtime->addressOfGCZeal()), Imm32(0),
             fail);
#endif

    // Don't execute the inline path if the compartment has an object metadata callback,
    // as the metadata to use for the object may vary between executions of the op.
    if (GetIonContext()->compartment->hasObjectMetadataCallback())
        jump(fail);

#ifdef JSGC_GENERATIONAL
    // Always use nursery allocation if it is possible to do so. The jit
    // assumes a nursery pointer is returned to avoid barriers.
    if (allocKind <= gc::FINALIZE_OBJECT_LAST && initialHeap != gc::TenuredHeap) {
        // Inline Nursery::allocate. No explicit check for nursery.isEnabled()
        // is needed, as the comparison with the nursery's end will always fail
        // in such cases.
        const Nursery &nursery = GetIonContext()->runtime->gcNursery();
        // result = position; temp = position + thingSize; fail if past end;
        // otherwise commit the bumped position.
        loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
        computeEffectiveAddress(Address(result, thingSize), temp);
        branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(nursery.addressOfCurrentEnd()), temp, fail);
        storePtr(temp, AbsoluteAddress(nursery.addressOfPosition()));
        return;
    }
#endif // JSGC_GENERATIONAL

    CompileZone *zone = GetIonContext()->compartment->zone();

    // Inline FreeSpan::allocate.
    // There is always exactly one FreeSpan per allocKind per JSCompartment.
    // If a FreeSpan is replaced, its members are updated in the freeLists table,
    // which the code below always re-reads.
    loadPtr(AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)), result);
    branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(zone->addressOfFreeListLast(allocKind)), result, fail);
    computeEffectiveAddress(Address(result, thingSize), temp);
    storePtr(temp, AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)));
}
michael@0 | 472 | |
michael@0 | 473 | void |
michael@0 | 474 | MacroAssembler::newGCThing(Register result, Register temp, JSObject *templateObject, Label *fail, |
michael@0 | 475 | gc::InitialHeap initialHeap) |
michael@0 | 476 | { |
michael@0 | 477 | gc::AllocKind allocKind = templateObject->tenuredGetAllocKind(); |
michael@0 | 478 | JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST); |
michael@0 | 479 | |
michael@0 | 480 | newGCThing(result, temp, allocKind, fail, initialHeap); |
michael@0 | 481 | } |
michael@0 | 482 | |
// Inline-allocate a GC string cell; jumps to |fail| for the slow path.
void
MacroAssembler::newGCString(Register result, Register temp, Label *fail)
{
    newGCThing(result, temp, js::gc::FINALIZE_STRING, fail);
}
michael@0 | 488 | |
// Inline-allocate a fat inline string cell; jumps to |fail| for the slow path.
void
MacroAssembler::newGCFatInlineString(Register result, Register temp, Label *fail)
{
    newGCThing(result, temp, js::gc::FINALIZE_FAT_INLINE_STRING, fail);
}
michael@0 | 494 | |
// Parallel-execution variant of newGCThing(): allocate a GC thing of
// |allocKind| from the per-context Allocator reachable through |cx|, leaving
// the cell in |result| and clobbering |tempReg1|/|tempReg2|. Jumps to |fail|
// when the relevant free list is empty.
void
MacroAssembler::newGCThingPar(Register result, Register cx, Register tempReg1, Register tempReg2,
                              gc::AllocKind allocKind, Label *fail)
{
    // Similar to ::newGCThing(), except that it allocates from a custom
    // Allocator in the ForkJoinContext*, rather than being hardcoded to the
    // compartment allocator. This requires two temporary registers.
    //
    // Subtle: I wanted to reuse `result` for one of the temporaries, but the
    // register allocator was assigning it to the same register as `cx`.
    // Then we overwrite that register which messed up the OOL code.

    uint32_t thingSize = (uint32_t)gc::Arena::thingSize(allocKind);

    // Load the allocator:
    // tempReg1 = (Allocator*) forkJoinCx->allocator()
    loadPtr(Address(cx, ThreadSafeContext::offsetOfAllocator()),
            tempReg1);

    // Get a pointer to the relevant free list:
    // tempReg1 = (FreeSpan*) &tempReg1->arenas.freeLists[(allocKind)]
    uint32_t offset = (offsetof(Allocator, arenas) +
                       js::gc::ArenaLists::getFreeListOffset(allocKind));
    addPtr(Imm32(offset), tempReg1);

    // Load first item on the list
    // tempReg2 = tempReg1->first
    loadPtr(Address(tempReg1, offsetof(gc::FreeSpan, first)), tempReg2);

    // Check whether list is empty
    // if tempReg1->last <= tempReg2, fail
    branchPtr(Assembler::BelowOrEqual,
              Address(tempReg1, offsetof(gc::FreeSpan, last)),
              tempReg2,
              fail);

    // If not, take first and advance pointer by thingSize bytes.
    // result = tempReg2;
    // tempReg2 += thingSize;
    movePtr(tempReg2, result);
    addPtr(Imm32(thingSize), tempReg2);

    // Update `first`
    // tempReg1->first = tempReg2;
    storePtr(tempReg2, Address(tempReg1, offsetof(gc::FreeSpan, first)));
}
michael@0 | 541 | |
michael@0 | 542 | void |
michael@0 | 543 | MacroAssembler::newGCThingPar(Register result, Register cx, Register tempReg1, Register tempReg2, |
michael@0 | 544 | JSObject *templateObject, Label *fail) |
michael@0 | 545 | { |
michael@0 | 546 | gc::AllocKind allocKind = templateObject->tenuredGetAllocKind(); |
michael@0 | 547 | JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST); |
michael@0 | 548 | |
michael@0 | 549 | newGCThingPar(result, cx, tempReg1, tempReg2, allocKind, fail); |
michael@0 | 550 | } |
michael@0 | 551 | |
// Parallel-allocate a GC string cell; jumps to |fail| for the slow path.
void
MacroAssembler::newGCStringPar(Register result, Register cx, Register tempReg1, Register tempReg2,
                               Label *fail)
{
    newGCThingPar(result, cx, tempReg1, tempReg2, js::gc::FINALIZE_STRING, fail);
}
michael@0 | 558 | |
// Parallel-allocate a fat inline string cell; jumps to |fail| for the slow path.
void
MacroAssembler::newGCFatInlineStringPar(Register result, Register cx, Register tempReg1,
                                        Register tempReg2, Label *fail)
{
    newGCThingPar(result, cx, tempReg1, tempReg2, js::gc::FINALIZE_FAT_INLINE_STRING, fail);
}
michael@0 | 565 | |
// Store the template object's fixed-slot values [start, min(end, nfixed))
// into the corresponding fixed slots of the object in |obj|.
// NOTE(review): |temp| is unused in this overload; presumably kept for
// signature symmetry with fillSlotsWithUndefined — confirm before removing.
void
MacroAssembler::copySlotsFromTemplate(Register obj, Register temp, const JSObject *templateObj,
                                      uint32_t start, uint32_t end)
{
    // Only fixed slots are copied; clamp the range to the fixed-slot count.
    uint32_t nfixed = Min(templateObj->numFixedSlots(), end);
    for (unsigned i = start; i < nfixed; i++)
        storeValue(templateObj->getFixedSlot(i), Address(obj, JSObject::getFixedSlotOffset(i)));
}
michael@0 | 574 | |
// Store UndefinedValue into the fixed slots [start, min(end, nfixed)) of the
// object in |obj|, clobbering |temp|.
void
MacroAssembler::fillSlotsWithUndefined(Register obj, Register temp, const JSObject *templateObj,
                                       uint32_t start, uint32_t end)
{
#ifdef JS_NUNBOX32
    // We only have a single spare register, so do the initialization as two
    // strided writes of the tag and body.
    jsval_layout jv = JSVAL_TO_IMPL(UndefinedValue());
    uint32_t nfixed = Min(templateObj->numFixedSlots(), end);

    // First pass: write the undefined tag word of every slot.
    mov(ImmWord(jv.s.tag), temp);
    for (unsigned i = start; i < nfixed; i++)
        store32(temp, ToType(Address(obj, JSObject::getFixedSlotOffset(i))));

    // Second pass: write the payload word of every slot.
    mov(ImmWord(jv.s.payload.i32), temp);
    for (unsigned i = start; i < nfixed; i++)
        store32(temp, ToPayload(Address(obj, JSObject::getFixedSlotOffset(i))));
#else
    // Boxed-value platforms: one full-width store per slot.
    moveValue(UndefinedValue(), temp);
    uint32_t nfixed = Min(templateObj->numFixedSlots(), end);
    for (unsigned i = start; i < nfixed; i++)
        storePtr(temp, Address(obj, JSObject::getFixedSlotOffset(i)));
#endif
}
michael@0 | 599 | |
michael@0 | 600 | static uint32_t |
michael@0 | 601 | FindStartOfUndefinedSlots(JSObject *templateObj, uint32_t nslots) |
michael@0 | 602 | { |
michael@0 | 603 | JS_ASSERT(nslots == templateObj->lastProperty()->slotSpan(templateObj->getClass())); |
michael@0 | 604 | JS_ASSERT(nslots > 0); |
michael@0 | 605 | for (uint32_t first = nslots; first != 0; --first) { |
michael@0 | 606 | if (templateObj->getSlot(first - 1) != UndefinedValue()) |
michael@0 | 607 | return first; |
michael@0 | 608 | } |
michael@0 | 609 | return 0; |
michael@0 | 610 | } |
michael@0 | 611 | |
// Initialize all slots of the freshly-allocated object in |obj| from
// |templateObj|, clobbering |temp|: non-undefined head slots are copied from
// the template, and the trailing all-undefined range is bulk-filled.
void
MacroAssembler::initGCSlots(Register obj, Register temp, JSObject *templateObj)
{
    // Slots of non-array objects are required to be initialized.
    // Use the values currently in the template object.
    uint32_t nslots = templateObj->lastProperty()->slotSpan(templateObj->getClass());
    if (nslots == 0)
        return;

    // Attempt to group slot writes such that we minimize the amount of
    // duplicated data we need to embed in code and load into registers. In
    // general, most template object slots will be undefined except for any
    // reserved slots. Since reserved slots come first, we split the object
    // logically into independent non-UndefinedValue writes to the head and
    // duplicated writes of UndefinedValue to the tail. For the majority of
    // objects, the "tail" will be the entire slot range.
    uint32_t startOfUndefined = FindStartOfUndefinedSlots(templateObj, nslots);
    copySlotsFromTemplate(obj, temp, templateObj, 0, startOfUndefined);
    fillSlotsWithUndefined(obj, temp, templateObj, startOfUndefined, nslots);
}
michael@0 | 632 | |
// Emit code that initializes a freshly allocated GC object |obj| from
// |templateObj|: shape, type, slots/elements pointers, and either the dense
// elements header (arrays) or slot contents plus private data (other
// objects). |temp| is clobbered as scratch.
void
MacroAssembler::initGCThing(Register obj, Register temp, JSObject *templateObj)
{
    // Fast initialization of an empty object returned by NewGCThing().

    JS_ASSERT(!templateObj->hasDynamicElements());

    storePtr(ImmGCPtr(templateObj->lastProperty()), Address(obj, JSObject::offsetOfShape()));
    storePtr(ImmGCPtr(templateObj->type()), Address(obj, JSObject::offsetOfType()));
    // Fresh objects have no dynamic slots.
    storePtr(ImmPtr(nullptr), Address(obj, JSObject::offsetOfSlots()));

    if (templateObj->is<ArrayObject>()) {
        JS_ASSERT(!templateObj->getDenseInitializedLength());

        int elementsOffset = JSObject::offsetOfFixedElements();

        // Point the elements pointer at the inline fixed-element storage.
        computeEffectiveAddress(Address(obj, elementsOffset), temp);
        storePtr(temp, Address(obj, JSObject::offsetOfElements()));

        // Fill in the elements header.
        store32(Imm32(templateObj->getDenseCapacity()),
                Address(obj, elementsOffset + ObjectElements::offsetOfCapacity()));
        store32(Imm32(templateObj->getDenseInitializedLength()),
                Address(obj, elementsOffset + ObjectElements::offsetOfInitializedLength()));
        store32(Imm32(templateObj->as<ArrayObject>().length()),
                Address(obj, elementsOffset + ObjectElements::offsetOfLength()));
        // Propagate only the convert-doubles flag; all other flags start clear.
        store32(Imm32(templateObj->shouldConvertDoubleElements()
                      ? ObjectElements::CONVERT_DOUBLE_ELEMENTS
                      : 0),
                Address(obj, elementsOffset + ObjectElements::offsetOfFlags()));
        JS_ASSERT(!templateObj->hasPrivate());
    } else {
        // Non-arrays share the static empty elements header.
        storePtr(ImmPtr(emptyObjectElements), Address(obj, JSObject::offsetOfElements()));

        initGCSlots(obj, temp, templateObj);

        if (templateObj->hasPrivate()) {
            // The private pointer is stored after the fixed slots.
            uint32_t nfixed = templateObj->numFixedSlots();
            storePtr(ImmPtr(templateObj->getPrivate()),
                     Address(obj, JSObject::getPrivateDataOffset(nfixed)));
        }
    }
}
michael@0 | 676 | |
// Emit code comparing the strings in |left| and |right| under equality op
// |op|, leaving 0/1 in |result|. Handles three fast cases inline: identical
// pointers, two atoms (pointer identity suffices), and strings of different
// length. Same-length non-atoms branch to |fail| for a slow-path character
// comparison. |temp| is clobbered.
void
MacroAssembler::compareStrings(JSOp op, Register left, Register right, Register result,
                               Register temp, Label *fail)
{
    JS_ASSERT(IsEqualityOp(op));

    Label done;
    Label notPointerEqual;
    // Fast path for identical strings.
    branchPtr(Assembler::NotEqual, left, right, &notPointerEqual);
    move32(Imm32(op == JSOP_EQ || op == JSOP_STRICTEQ), result);
    jump(&done);

    bind(&notPointerEqual);
    loadPtr(Address(left, JSString::offsetOfLengthAndFlags()), result);
    loadPtr(Address(right, JSString::offsetOfLengthAndFlags()), temp);

    Label notAtom;
    // Optimize the equality operation to a pointer compare for two atoms.
    // Reaching the compare requires BOTH strings to carry the atom bit.
    Imm32 atomBit(JSString::ATOM_BIT);
    branchTest32(Assembler::Zero, result, atomBit, &notAtom);
    branchTest32(Assembler::Zero, temp, atomBit, &notAtom);

    cmpPtrSet(JSOpToCondition(MCompare::Compare_String, op), left, right, result);
    jump(&done);

    bind(&notAtom);
    // Strings of different length can never be equal.
    // Shift the length out of the packed length-and-flags words first.
    rshiftPtr(Imm32(JSString::LENGTH_SHIFT), result);
    rshiftPtr(Imm32(JSString::LENGTH_SHIFT), temp);
    branchPtr(Assembler::Equal, result, temp, fail);
    move32(Imm32(op == JSOP_NE || op == JSOP_STRICTNE), result);

    bind(&done);
}
michael@0 | 712 | |
// Emit a check of the runtime's parallel-interrupt flag, branching to |fail|
// when it is non-zero. |tempReg| is clobbered to hold the flag's address.
void
MacroAssembler::checkInterruptFlagPar(Register tempReg, Label *fail)
{
#ifdef JS_THREADSAFE
    movePtr(ImmPtr(GetIonContext()->runtime->addressOfInterruptPar()), tempReg);
    branch32(Assembler::NonZero, Address(tempReg, 0), Imm32(0), fail);
#else
    MOZ_ASSUME_UNREACHABLE("JSRuntime::interruptPar doesn't exist on non-threadsafe builds.");
#endif
}
michael@0 | 723 | |
// Static wrapper so generateBailoutTail can take a plain function address
// with JS_FUNC_TO_DATA_PTR for its ABI call. (Presumably js_ReportOverRecursed
// is not used directly because of linkage/overload concerns — TODO confirm.)
static void
ReportOverRecursed(JSContext *cx)
{
    js_ReportOverRecursed(cx);
}
michael@0 | 729 | |
// Emit the common tail of an Ion bailout: dispatch on the Bailout return
// code, report over-recursion or exceptions, and otherwise rebuild the
// baseline stack from |bailoutInfo| and resume either in a type-monitoring
// IC stub chain or directly in baseline jitcode. |scratch| is clobbered;
// |bailoutInfo| must hold a BaselineBailoutInfo*.
void
MacroAssembler::generateBailoutTail(Register scratch, Register bailoutInfo)
{
    enterExitFrame();

    Label baseline;

    // The return value from Bailout is tagged as:
    // - 0x0: done (enter baseline)
    // - 0x1: error (handle exception)
    // - 0x2: overrecursed
    JS_STATIC_ASSERT(BAILOUT_RETURN_OK == 0);
    JS_STATIC_ASSERT(BAILOUT_RETURN_FATAL_ERROR == 1);
    JS_STATIC_ASSERT(BAILOUT_RETURN_OVERRECURSED == 2);

    branch32(Equal, ReturnReg, Imm32(BAILOUT_RETURN_OK), &baseline);
    branch32(Equal, ReturnReg, Imm32(BAILOUT_RETURN_FATAL_ERROR), exceptionLabel());

    // Fall-through: overrecursed.
    {
        // Report the over-recursion on the JSContext, then unwind through the
        // normal exception path.
        loadJSContext(ReturnReg);
        setupUnalignedABICall(1, scratch);
        passABIArg(ReturnReg);
        callWithABI(JS_FUNC_TO_DATA_PTR(void *, ReportOverRecursed));
        jump(exceptionLabel());
    }

    bind(&baseline);
    {
        // Prepare a register set for use in this case.
        GeneralRegisterSet regs(GeneralRegisterSet::All());
        JS_ASSERT(!regs.has(BaselineStackReg));
        regs.take(bailoutInfo);

        // Reset SP to the point where clobbering starts.
        loadPtr(Address(bailoutInfo, offsetof(BaselineBailoutInfo, incomingStack)),
                BaselineStackReg);

        Register copyCur = regs.takeAny();
        Register copyEnd = regs.takeAny();
        Register temp = regs.takeAny();

        // Copy data onto stack.
        loadPtr(Address(bailoutInfo, offsetof(BaselineBailoutInfo, copyStackTop)), copyCur);
        loadPtr(Address(bailoutInfo, offsetof(BaselineBailoutInfo, copyStackBottom)), copyEnd);
        {
            // Copy [copyStackBottom, copyStackTop) downward onto the baseline
            // stack, 4 bytes at a time, from high addresses to low.
            Label copyLoop;
            Label endOfCopy;
            bind(&copyLoop);
            branchPtr(Assembler::BelowOrEqual, copyCur, copyEnd, &endOfCopy);
            subPtr(Imm32(4), copyCur);
            subPtr(Imm32(4), BaselineStackReg);
            load32(Address(copyCur, 0), temp);
            store32(temp, Address(BaselineStackReg, 0));
            jump(&copyLoop);
            bind(&endOfCopy);
        }

        // Enter exit frame for the FinishBailoutToBaseline call.
        // The frame descriptor is built from the resumed frame's size.
        loadPtr(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeFramePtr)), temp);
        load32(Address(temp, BaselineFrame::reverseOffsetOfFrameSize()), temp);
        makeFrameDescriptor(temp, JitFrame_BaselineJS);
        push(temp);
        push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeAddr)));
        enterFakeExitFrame();

        // If monitorStub is non-null, handle resumeAddr appropriately.
        Label noMonitor;
        // NOTE(review): |done| appears unused — both resume paths end in an
        // unconditional jump and never bind or reference it.
        Label done;
        branchPtr(Assembler::Equal,
                  Address(bailoutInfo, offsetof(BaselineBailoutInfo, monitorStub)),
                  ImmPtr(nullptr),
                  &noMonitor);

        //
        // Resuming into a monitoring stub chain.
        //
        {
            // Save needed values onto stack temporarily.
            pushValue(Address(bailoutInfo, offsetof(BaselineBailoutInfo, valueR0)));
            push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeFramePtr)));
            push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeAddr)));
            push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, monitorStub)));

            // Call a stub to free allocated memory and create arguments objects.
            // A zero return signals failure; unwind through the exception path.
            setupUnalignedABICall(1, temp);
            passABIArg(bailoutInfo);
            callWithABI(JS_FUNC_TO_DATA_PTR(void *, FinishBailoutToBaseline));
            branchTest32(Zero, ReturnReg, ReturnReg, exceptionLabel());

            // Restore values where they need to be and resume execution.
            // Pops must mirror the pushes above (monitorStub, resumeAddr,
            // resumeFramePtr, valueR0 — in reverse).
            GeneralRegisterSet enterMonRegs(GeneralRegisterSet::All());
            enterMonRegs.take(R0);
            enterMonRegs.take(BaselineStubReg);
            enterMonRegs.take(BaselineFrameReg);
            enterMonRegs.takeUnchecked(BaselineTailCallReg);

            pop(BaselineStubReg);
            pop(BaselineTailCallReg);
            pop(BaselineFrameReg);
            popValue(R0);

            // Discard exit frame.
            addPtr(Imm32(IonExitFrameLayout::SizeWithFooter()), StackPointer);

            // On x86/x64 the tail-call register is passed on the stack.
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
            push(BaselineTailCallReg);
#endif
            jump(Address(BaselineStubReg, ICStub::offsetOfStubCode()));
        }

        //
        // Resuming into main jitcode.
        //
        bind(&noMonitor);
        {
            // Save needed values onto stack temporarily.
            pushValue(Address(bailoutInfo, offsetof(BaselineBailoutInfo, valueR0)));
            pushValue(Address(bailoutInfo, offsetof(BaselineBailoutInfo, valueR1)));
            push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeFramePtr)));
            push(Address(bailoutInfo, offsetof(BaselineBailoutInfo, resumeAddr)));

            // Call a stub to free allocated memory and create arguments objects.
            setupUnalignedABICall(1, temp);
            passABIArg(bailoutInfo);
            callWithABI(JS_FUNC_TO_DATA_PTR(void *, FinishBailoutToBaseline));
            branchTest32(Zero, ReturnReg, ReturnReg, exceptionLabel());

            // Restore values where they need to be and resume execution.
            GeneralRegisterSet enterRegs(GeneralRegisterSet::All());
            enterRegs.take(R0);
            enterRegs.take(R1);
            enterRegs.take(BaselineFrameReg);
            Register jitcodeReg = enterRegs.takeAny();

            pop(jitcodeReg);
            pop(BaselineFrameReg);
            popValue(R1);
            popValue(R0);

            // Discard exit frame.
            addPtr(Imm32(IonExitFrameLayout::SizeWithFooter()), StackPointer);

            jump(jitcodeReg);
        }
    }
}
michael@0 | 877 | |
// Load into |dest| the raw jitcode entry point of |script| for |mode|,
// branching to |failure| (when non-null) if no usable code exists.
void
MacroAssembler::loadBaselineOrIonRaw(Register script, Register dest, ExecutionMode mode,
                                     Label *failure)
{
    if (mode == SequentialExecution) {
        // The JSScript caches a direct code pointer; null means none.
        loadPtr(Address(script, JSScript::offsetOfBaselineOrIonRaw()), dest);
        if (failure)
            branchTestPtr(Assembler::Zero, dest, dest, failure);
    } else {
        // Parallel mode stores an IonScript*; pointer values at or below
        // ION_COMPILING_SCRIPT are sentinels, not real scripts.
        loadPtr(Address(script, JSScript::offsetOfParallelIonScript()), dest);
        if (failure)
            branchPtr(Assembler::BelowOrEqual, dest, ImmPtr(ION_COMPILING_SCRIPT), failure);
        loadPtr(Address(dest, IonScript::offsetOfMethod()), dest);
        loadPtr(Address(dest, JitCode::offsetOfCode()), dest);
    }
}
michael@0 | 894 | |
michael@0 | 895 | void |
michael@0 | 896 | MacroAssembler::loadBaselineOrIonNoArgCheck(Register script, Register dest, ExecutionMode mode, |
michael@0 | 897 | Label *failure) |
michael@0 | 898 | { |
michael@0 | 899 | if (mode == SequentialExecution) { |
michael@0 | 900 | loadPtr(Address(script, JSScript::offsetOfBaselineOrIonSkipArgCheck()), dest); |
michael@0 | 901 | if (failure) |
michael@0 | 902 | branchTestPtr(Assembler::Zero, dest, dest, failure); |
michael@0 | 903 | } else { |
michael@0 | 904 | // Find second register to get the offset to skip argument check |
michael@0 | 905 | Register offset = script; |
michael@0 | 906 | if (script == dest) { |
michael@0 | 907 | GeneralRegisterSet regs(GeneralRegisterSet::All()); |
michael@0 | 908 | regs.take(dest); |
michael@0 | 909 | offset = regs.takeAny(); |
michael@0 | 910 | } |
michael@0 | 911 | |
michael@0 | 912 | loadPtr(Address(script, JSScript::offsetOfParallelIonScript()), dest); |
michael@0 | 913 | if (failure) |
michael@0 | 914 | branchPtr(Assembler::BelowOrEqual, dest, ImmPtr(ION_COMPILING_SCRIPT), failure); |
michael@0 | 915 | |
michael@0 | 916 | Push(offset); |
michael@0 | 917 | load32(Address(script, IonScript::offsetOfSkipArgCheckEntryOffset()), offset); |
michael@0 | 918 | |
michael@0 | 919 | loadPtr(Address(dest, IonScript::offsetOfMethod()), dest); |
michael@0 | 920 | loadPtr(Address(dest, JitCode::offsetOfCode()), dest); |
michael@0 | 921 | addPtr(offset, dest); |
michael@0 | 922 | |
michael@0 | 923 | Pop(offset); |
michael@0 | 924 | } |
michael@0 | 925 | } |
michael@0 | 926 | |
michael@0 | 927 | void |
michael@0 | 928 | MacroAssembler::loadBaselineFramePtr(Register framePtr, Register dest) |
michael@0 | 929 | { |
michael@0 | 930 | if (framePtr != dest) |
michael@0 | 931 | movePtr(framePtr, dest); |
michael@0 | 932 | subPtr(Imm32(BaselineFrame::Size()), dest); |
michael@0 | 933 | } |
michael@0 | 934 | |
// Emit code leaving the current ForkJoinContext* in |cx|. Performs a full
// ABI call (using |scratch| for call setup), so volatile registers are
// clobbered.
void
MacroAssembler::loadForkJoinContext(Register cx, Register scratch)
{
    // Load the current ForkJoinContext *. If we need a parallel exit frame,
    // chances are we are about to do something very slow anyways, so just
    // call ForkJoinContextPar again instead of using the cached version.
    setupUnalignedABICall(0, scratch);
    callWithABI(JS_FUNC_TO_DATA_PTR(void *, ForkJoinContextPar));
    if (ReturnReg != cx)
        movePtr(ReturnReg, cx);
}
michael@0 | 946 | |
michael@0 | 947 | void |
michael@0 | 948 | MacroAssembler::loadContext(Register cxReg, Register scratch, ExecutionMode executionMode) |
michael@0 | 949 | { |
michael@0 | 950 | switch (executionMode) { |
michael@0 | 951 | case SequentialExecution: |
michael@0 | 952 | // The scratch register is not used for sequential execution. |
michael@0 | 953 | loadJSContext(cxReg); |
michael@0 | 954 | break; |
michael@0 | 955 | case ParallelExecution: |
michael@0 | 956 | loadForkJoinContext(cxReg, scratch); |
michael@0 | 957 | break; |
michael@0 | 958 | default: |
michael@0 | 959 | MOZ_ASSUME_UNREACHABLE("No such execution mode"); |
michael@0 | 960 | } |
michael@0 | 961 | } |
michael@0 | 962 | |
// Emit code entering a parallel exit frame for VMFunction |f|, leaving the
// ForkJoinContext* in |cx|. |scratch| is clobbered.
void
MacroAssembler::enterParallelExitFrameAndLoadContext(const VMFunction *f, Register cx,
                                                     Register scratch)
{
    loadForkJoinContext(cx, scratch);
    // Load the PerThreadData from the cx.
    loadPtr(Address(cx, offsetof(ForkJoinContext, perThreadData)), scratch);
    linkParallelExitFrame(scratch);
    // Push the ioncode. The placeholder word is patched later through
    // exitCodePatch_.
    exitCodePatch_ = PushWithPatch(ImmWord(-1));
    // Push the VMFunction pointer, to mark arguments.
    Push(ImmPtr(f));
}
michael@0 | 976 | |
// Emit code entering a "fake" parallel exit frame: like
// enterParallelExitFrameAndLoadContext but with a known JitCode* and a null
// VMFunction pointer (no argument description). |cx| must already hold the
// ForkJoinContext*; |scratch| is clobbered.
void
MacroAssembler::enterFakeParallelExitFrame(Register cx, Register scratch,
                                           JitCode *codeVal)
{
    // Load the PerThreadData from the cx.
    loadPtr(Address(cx, offsetof(ForkJoinContext, perThreadData)), scratch);
    linkParallelExitFrame(scratch);
    Push(ImmPtr(codeVal));
    Push(ImmPtr(nullptr));
}
michael@0 | 987 | |
michael@0 | 988 | void |
michael@0 | 989 | MacroAssembler::enterExitFrameAndLoadContext(const VMFunction *f, Register cxReg, Register scratch, |
michael@0 | 990 | ExecutionMode executionMode) |
michael@0 | 991 | { |
michael@0 | 992 | switch (executionMode) { |
michael@0 | 993 | case SequentialExecution: |
michael@0 | 994 | // The scratch register is not used for sequential execution. |
michael@0 | 995 | enterExitFrame(f); |
michael@0 | 996 | loadJSContext(cxReg); |
michael@0 | 997 | break; |
michael@0 | 998 | case ParallelExecution: |
michael@0 | 999 | enterParallelExitFrameAndLoadContext(f, cxReg, scratch); |
michael@0 | 1000 | break; |
michael@0 | 1001 | default: |
michael@0 | 1002 | MOZ_ASSUME_UNREACHABLE("No such execution mode"); |
michael@0 | 1003 | } |
michael@0 | 1004 | } |
michael@0 | 1005 | |
michael@0 | 1006 | void |
michael@0 | 1007 | MacroAssembler::enterFakeExitFrame(Register cxReg, Register scratch, |
michael@0 | 1008 | ExecutionMode executionMode, |
michael@0 | 1009 | JitCode *codeVal) |
michael@0 | 1010 | { |
michael@0 | 1011 | switch (executionMode) { |
michael@0 | 1012 | case SequentialExecution: |
michael@0 | 1013 | // The cx and scratch registers are not used for sequential execution. |
michael@0 | 1014 | enterFakeExitFrame(codeVal); |
michael@0 | 1015 | break; |
michael@0 | 1016 | case ParallelExecution: |
michael@0 | 1017 | enterFakeParallelExitFrame(cxReg, scratch, codeVal); |
michael@0 | 1018 | break; |
michael@0 | 1019 | default: |
michael@0 | 1020 | MOZ_ASSUME_UNREACHABLE("No such execution mode"); |
michael@0 | 1021 | } |
michael@0 | 1022 | } |
michael@0 | 1023 | |
michael@0 | 1024 | void |
michael@0 | 1025 | MacroAssembler::handleFailure(ExecutionMode executionMode) |
michael@0 | 1026 | { |
michael@0 | 1027 | // Re-entry code is irrelevant because the exception will leave the |
michael@0 | 1028 | // running function and never come back |
michael@0 | 1029 | if (sps_) |
michael@0 | 1030 | sps_->skipNextReenter(); |
michael@0 | 1031 | leaveSPSFrame(); |
michael@0 | 1032 | |
michael@0 | 1033 | void *handler; |
michael@0 | 1034 | switch (executionMode) { |
michael@0 | 1035 | case SequentialExecution: |
michael@0 | 1036 | handler = JS_FUNC_TO_DATA_PTR(void *, jit::HandleException); |
michael@0 | 1037 | break; |
michael@0 | 1038 | case ParallelExecution: |
michael@0 | 1039 | handler = JS_FUNC_TO_DATA_PTR(void *, jit::HandleParallelFailure); |
michael@0 | 1040 | break; |
michael@0 | 1041 | default: |
michael@0 | 1042 | MOZ_ASSUME_UNREACHABLE("No such execution mode"); |
michael@0 | 1043 | } |
michael@0 | 1044 | MacroAssemblerSpecific::handleFailureWithHandler(handler); |
michael@0 | 1045 | |
michael@0 | 1046 | // Doesn't actually emit code, but balances the leave() |
michael@0 | 1047 | if (sps_) |
michael@0 | 1048 | sps_->reenter(*this, InvalidReg); |
michael@0 | 1049 | } |
michael@0 | 1050 | |
#ifdef DEBUG
// True when the active IonContext belongs to an asm.js compilation.
static inline bool
IsCompilingAsmJS()
{
    // asm.js compilation pushes an IonContext with a null JSCompartment.
    IonContext *ictx = MaybeGetIonContext();
    return ictx && ictx->compartment == nullptr;
}

// Out-of-line target for MacroAssembler::assumeUnreachable: report the
// assertion message (the emitted code hits a breakpoint afterwards).
static void
AssumeUnreachable_(const char *output) {
    MOZ_ReportAssertionFailure(output, __FILE__, __LINE__);
}
#endif
michael@0 | 1065 | |
// Emit code asserting this point is never reached: in DEBUG builds, report
// |output| via an out-of-line call (volatile registers preserved), then in
// all builds emit a breakpoint.
void
MacroAssembler::assumeUnreachable(const char *output)
{
#ifdef DEBUG
    RegisterSet regs = RegisterSet::Volatile();
    PushRegsInMask(regs);
    Register temp = regs.takeGeneral();

    // With ASLR, we can't rely on 'output' to point to the
    // same char array after serialization/deserialization.
    // It is not possible until we modify AsmJsImmPtr and
    // the underlying "patching" mechanism.
    if (IsCompilingAsmJS()) {
        setupUnalignedABICall(0, temp);
        callWithABINoProfiling(AsmJSImm_AssumeUnreachable);
    } else {
        // NOTE: |output| is baked into the generated code as a raw pointer;
        // it must outlive the code.
        setupUnalignedABICall(1, temp);
        movePtr(ImmPtr(output), temp);
        passABIArg(temp);
        callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, AssumeUnreachable_));
    }
    PopRegsInMask(RegisterSet::Volatile());
#endif

    breakpoint();
}
michael@0 | 1092 | |
// Out-of-line helper for MacroAssembler::printf(const char *): write the
// string verbatim to stdout.
static void
Printf0_(const char *output) {
    fputs(output, stdout);
}
michael@0 | 1097 | |
// Emit code that prints |output| at runtime (debugging aid). Volatile
// registers are saved and restored around the call. NOTE: |output| is baked
// into the generated code as a raw pointer; it must outlive the code.
void
MacroAssembler::printf(const char *output)
{
    RegisterSet regs = RegisterSet::Volatile();
    PushRegsInMask(regs);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(1, temp);
    movePtr(ImmPtr(output), temp);
    passABIArg(temp);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, Printf0_));

    PopRegsInMask(RegisterSet::Volatile());
}
michael@0 | 1113 | |
michael@0 | 1114 | static void |
michael@0 | 1115 | Printf1_(const char *output, uintptr_t value) { |
michael@0 | 1116 | char *line = JS_sprintf_append(nullptr, output, value); |
michael@0 | 1117 | printf("%s", line); |
michael@0 | 1118 | js_free(line); |
michael@0 | 1119 | } |
michael@0 | 1120 | |
// Emit code that prints |output| formatted with the runtime contents of
// |value| (debugging aid). Volatile registers are preserved; |value| is
// excluded from the scratch set so it survives call setup.
void
MacroAssembler::printf(const char *output, Register value)
{
    RegisterSet regs = RegisterSet::Volatile();
    PushRegsInMask(regs);

    regs.takeUnchecked(value);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(2, temp);
    movePtr(ImmPtr(output), temp);
    passABIArg(temp);
    passABIArg(value);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, Printf1_));

    PopRegsInMask(RegisterSet::Volatile());
}
michael@0 | 1139 | |
michael@0 | 1140 | #ifdef JS_TRACE_LOGGING |
// Emit a call to TraceLogStartEvent(logger, textId) for a compile-time
// constant event id. Volatile registers are preserved around the call.
void
MacroAssembler::tracelogStart(Register logger, uint32_t textId)
{
    // Bind the two-argument overload so JS_FUNC_TO_DATA_PTR gets an
    // unambiguous function address.
    void (&TraceLogFunc)(TraceLogger*, uint32_t) = TraceLogStartEvent;

    PushRegsInMask(RegisterSet::Volatile());

    RegisterSet regs = RegisterSet::Volatile();
    regs.takeUnchecked(logger);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(2, temp);
    passABIArg(logger);
    move32(Imm32(textId), temp);
    passABIArg(temp);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));

    PopRegsInMask(RegisterSet::Volatile());
}
michael@0 | 1161 | |
// Emit a call to TraceLogStartEvent(logger, textId) with a runtime event id
// held in |textId|. Volatile registers are preserved around the call.
void
MacroAssembler::tracelogStart(Register logger, Register textId)
{
    // Bind the two-argument overload so JS_FUNC_TO_DATA_PTR gets an
    // unambiguous function address.
    void (&TraceLogFunc)(TraceLogger*, uint32_t) = TraceLogStartEvent;

    PushRegsInMask(RegisterSet::Volatile());

    RegisterSet regs = RegisterSet::Volatile();
    regs.takeUnchecked(logger);
    regs.takeUnchecked(textId);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(2, temp);
    passABIArg(logger);
    passABIArg(textId);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));

    // Return temp to the set; regs is not consulted again, so this is
    // bookkeeping only.
    regs.add(temp);

    PopRegsInMask(RegisterSet::Volatile());
}
michael@0 | 1184 | |
// Emit a call to TraceLogStopEvent(logger, textId) for a compile-time
// constant event id. Volatile registers are preserved around the call.
void
MacroAssembler::tracelogStop(Register logger, uint32_t textId)
{
    // Bind the two-argument overload so JS_FUNC_TO_DATA_PTR gets an
    // unambiguous function address.
    void (&TraceLogFunc)(TraceLogger*, uint32_t) = TraceLogStopEvent;

    PushRegsInMask(RegisterSet::Volatile());

    RegisterSet regs = RegisterSet::Volatile();
    regs.takeUnchecked(logger);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(2, temp);
    passABIArg(logger);
    move32(Imm32(textId), temp);
    passABIArg(temp);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));

    // Return temp to the set; regs is not consulted again, so this is
    // bookkeeping only.
    regs.add(temp);

    PopRegsInMask(RegisterSet::Volatile());
}
michael@0 | 1207 | |
// Emit a call stopping the trace-log event held in |textId|. In non-DEBUG
// builds the specific id is ignored and a generic stop (single-argument
// overload) is emitted instead.
void
MacroAssembler::tracelogStop(Register logger, Register textId)
{
#ifdef DEBUG
    // Bind the two-argument overload so JS_FUNC_TO_DATA_PTR gets an
    // unambiguous function address.
    void (&TraceLogFunc)(TraceLogger*, uint32_t) = TraceLogStopEvent;

    PushRegsInMask(RegisterSet::Volatile());

    RegisterSet regs = RegisterSet::Volatile();
    regs.takeUnchecked(logger);
    regs.takeUnchecked(textId);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(2, temp);
    passABIArg(logger);
    passABIArg(textId);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));

    // Return temp to the set; regs is not consulted again, so this is
    // bookkeeping only.
    regs.add(temp);

    PopRegsInMask(RegisterSet::Volatile());
#else
    tracelogStop(logger);
#endif
}
michael@0 | 1234 | |
// Emit a call to TraceLogStopEvent(logger), stopping the most recent event
// without naming a specific id. Volatile registers are preserved.
void
MacroAssembler::tracelogStop(Register logger)
{
    // Bind the single-argument overload so JS_FUNC_TO_DATA_PTR gets an
    // unambiguous function address.
    void (&TraceLogFunc)(TraceLogger*) = TraceLogStopEvent;

    PushRegsInMask(RegisterSet::Volatile());

    RegisterSet regs = RegisterSet::Volatile();
    regs.takeUnchecked(logger);

    Register temp = regs.takeGeneral();

    setupUnalignedABICall(1, temp);
    passABIArg(logger);
    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));

    // Return temp to the set; regs is not consulted again, so this is
    // bookkeeping only.
    regs.add(temp);

    PopRegsInMask(RegisterSet::Volatile());
}
michael@0 | 1255 | #endif |
michael@0 | 1256 | |
// Emit code that, when the Value at |address| holds an int32, rewrites it in
// place as the equivalent double (clobbering |scratch| and ScratchFloatReg)
// and falls through; when it holds anything else, branches to |done| leaving
// the value untouched.
void
MacroAssembler::convertInt32ValueToDouble(const Address &address, Register scratch, Label *done)
{
    branchTestInt32(Assembler::NotEqual, address, done);
    unboxInt32(address, scratch);
    convertInt32ToDouble(scratch, ScratchFloatReg);
    storeDouble(ScratchFloatReg, address);
}
michael@0 | 1265 | |
// Emit code converting the boxed Value in |value| to a double or float32 (per
// |outputType|) in |output|, following ToNumber semantics for the primitive
// cases handled inline: double, int32, boolean, null (0.0), undefined (NaN).
// Any other tag (string, object, ...) branches to |fail|.
void
MacroAssembler::convertValueToFloatingPoint(ValueOperand value, FloatRegister output,
                                            Label *fail, MIRType outputType)
{
    Register tag = splitTagForTest(value);

    Label isDouble, isInt32, isBool, isNull, done;

    branchTestDouble(Assembler::Equal, tag, &isDouble);
    branchTestInt32(Assembler::Equal, tag, &isInt32);
    branchTestBoolean(Assembler::Equal, tag, &isBool);
    branchTestNull(Assembler::Equal, tag, &isNull);
    branchTestUndefined(Assembler::NotEqual, tag, fail);

    // fall-through: undefined
    loadConstantFloatingPoint(GenericNaN(), float(GenericNaN()), output, outputType);
    jump(&done);

    bind(&isNull);
    loadConstantFloatingPoint(0.0, 0.0f, output, outputType);
    jump(&done);

    bind(&isBool);
    boolValueToFloatingPoint(value, output, outputType);
    jump(&done);

    bind(&isInt32);
    int32ValueToFloatingPoint(value, output, outputType);
    jump(&done);

    bind(&isDouble);
    unboxDouble(value, output);
    // Narrow to float32 when that is the requested output type.
    if (outputType == MIRType_Float32)
        convertDoubleToFloat32(output, output);
    bind(&done);
}
michael@0 | 1302 | |
michael@0 | 1303 | bool |
michael@0 | 1304 | MacroAssembler::convertValueToFloatingPoint(JSContext *cx, const Value &v, FloatRegister output, |
michael@0 | 1305 | Label *fail, MIRType outputType) |
michael@0 | 1306 | { |
michael@0 | 1307 | if (v.isNumber() || v.isString()) { |
michael@0 | 1308 | double d; |
michael@0 | 1309 | if (v.isNumber()) |
michael@0 | 1310 | d = v.toNumber(); |
michael@0 | 1311 | else if (!StringToNumber(cx, v.toString(), &d)) |
michael@0 | 1312 | return false; |
michael@0 | 1313 | |
michael@0 | 1314 | loadConstantFloatingPoint(d, (float)d, output, outputType); |
michael@0 | 1315 | return true; |
michael@0 | 1316 | } |
michael@0 | 1317 | |
michael@0 | 1318 | if (v.isBoolean()) { |
michael@0 | 1319 | if (v.toBoolean()) |
michael@0 | 1320 | loadConstantFloatingPoint(1.0, 1.0f, output, outputType); |
michael@0 | 1321 | else |
michael@0 | 1322 | loadConstantFloatingPoint(0.0, 0.0f, output, outputType); |
michael@0 | 1323 | return true; |
michael@0 | 1324 | } |
michael@0 | 1325 | |
michael@0 | 1326 | if (v.isNull()) { |
michael@0 | 1327 | loadConstantFloatingPoint(0.0, 0.0f, output, outputType); |
michael@0 | 1328 | return true; |
michael@0 | 1329 | } |
michael@0 | 1330 | |
michael@0 | 1331 | if (v.isUndefined()) { |
michael@0 | 1332 | loadConstantFloatingPoint(GenericNaN(), float(GenericNaN()), output, outputType); |
michael@0 | 1333 | return true; |
michael@0 | 1334 | } |
michael@0 | 1335 | |
michael@0 | 1336 | JS_ASSERT(v.isObject()); |
michael@0 | 1337 | jump(fail); |
michael@0 | 1338 | return true; |
michael@0 | 1339 | } |
michael@0 | 1340 | |
michael@0 | 1341 | void |
michael@0 | 1342 | MacroAssembler::PushEmptyRooted(VMFunction::RootType rootType) |
michael@0 | 1343 | { |
michael@0 | 1344 | switch (rootType) { |
michael@0 | 1345 | case VMFunction::RootNone: |
michael@0 | 1346 | MOZ_ASSUME_UNREACHABLE("Handle must have root type"); |
michael@0 | 1347 | case VMFunction::RootObject: |
michael@0 | 1348 | case VMFunction::RootString: |
michael@0 | 1349 | case VMFunction::RootPropertyName: |
michael@0 | 1350 | case VMFunction::RootFunction: |
michael@0 | 1351 | case VMFunction::RootCell: |
michael@0 | 1352 | Push(ImmPtr(nullptr)); |
michael@0 | 1353 | break; |
michael@0 | 1354 | case VMFunction::RootValue: |
michael@0 | 1355 | Push(UndefinedValue()); |
michael@0 | 1356 | break; |
michael@0 | 1357 | } |
michael@0 | 1358 | } |
michael@0 | 1359 | |
michael@0 | 1360 | void |
michael@0 | 1361 | MacroAssembler::popRooted(VMFunction::RootType rootType, Register cellReg, |
michael@0 | 1362 | const ValueOperand &valueReg) |
michael@0 | 1363 | { |
michael@0 | 1364 | switch (rootType) { |
michael@0 | 1365 | case VMFunction::RootNone: |
michael@0 | 1366 | MOZ_ASSUME_UNREACHABLE("Handle must have root type"); |
michael@0 | 1367 | case VMFunction::RootObject: |
michael@0 | 1368 | case VMFunction::RootString: |
michael@0 | 1369 | case VMFunction::RootPropertyName: |
michael@0 | 1370 | case VMFunction::RootFunction: |
michael@0 | 1371 | case VMFunction::RootCell: |
michael@0 | 1372 | Pop(cellReg); |
michael@0 | 1373 | break; |
michael@0 | 1374 | case VMFunction::RootValue: |
michael@0 | 1375 | Pop(valueReg); |
michael@0 | 1376 | break; |
michael@0 | 1377 | } |
michael@0 | 1378 | } |
michael@0 | 1379 | |
michael@0 | 1380 | bool |
michael@0 | 1381 | MacroAssembler::convertConstantOrRegisterToFloatingPoint(JSContext *cx, ConstantOrRegister src, |
michael@0 | 1382 | FloatRegister output, Label *fail, |
michael@0 | 1383 | MIRType outputType) |
michael@0 | 1384 | { |
michael@0 | 1385 | if (src.constant()) |
michael@0 | 1386 | return convertValueToFloatingPoint(cx, src.value(), output, fail, outputType); |
michael@0 | 1387 | |
michael@0 | 1388 | convertTypedOrValueToFloatingPoint(src.reg(), output, fail, outputType); |
michael@0 | 1389 | return true; |
michael@0 | 1390 | } |
michael@0 | 1391 | |
michael@0 | 1392 | void |
michael@0 | 1393 | MacroAssembler::convertTypedOrValueToFloatingPoint(TypedOrValueRegister src, FloatRegister output, |
michael@0 | 1394 | Label *fail, MIRType outputType) |
michael@0 | 1395 | { |
michael@0 | 1396 | JS_ASSERT(IsFloatingPointType(outputType)); |
michael@0 | 1397 | |
michael@0 | 1398 | if (src.hasValue()) { |
michael@0 | 1399 | convertValueToFloatingPoint(src.valueReg(), output, fail, outputType); |
michael@0 | 1400 | return; |
michael@0 | 1401 | } |
michael@0 | 1402 | |
michael@0 | 1403 | bool outputIsDouble = outputType == MIRType_Double; |
michael@0 | 1404 | switch (src.type()) { |
michael@0 | 1405 | case MIRType_Null: |
michael@0 | 1406 | loadConstantFloatingPoint(0.0, 0.0f, output, outputType); |
michael@0 | 1407 | break; |
michael@0 | 1408 | case MIRType_Boolean: |
michael@0 | 1409 | case MIRType_Int32: |
michael@0 | 1410 | convertInt32ToFloatingPoint(src.typedReg().gpr(), output, outputType); |
michael@0 | 1411 | break; |
michael@0 | 1412 | case MIRType_Float32: |
michael@0 | 1413 | if (outputIsDouble) { |
michael@0 | 1414 | convertFloat32ToDouble(src.typedReg().fpu(), output); |
michael@0 | 1415 | } else { |
michael@0 | 1416 | if (src.typedReg().fpu() != output) |
michael@0 | 1417 | moveFloat32(src.typedReg().fpu(), output); |
michael@0 | 1418 | } |
michael@0 | 1419 | break; |
michael@0 | 1420 | case MIRType_Double: |
michael@0 | 1421 | if (outputIsDouble) { |
michael@0 | 1422 | if (src.typedReg().fpu() != output) |
michael@0 | 1423 | moveDouble(src.typedReg().fpu(), output); |
michael@0 | 1424 | } else { |
michael@0 | 1425 | convertDoubleToFloat32(src.typedReg().fpu(), output); |
michael@0 | 1426 | } |
michael@0 | 1427 | break; |
michael@0 | 1428 | case MIRType_Object: |
michael@0 | 1429 | case MIRType_String: |
michael@0 | 1430 | jump(fail); |
michael@0 | 1431 | break; |
michael@0 | 1432 | case MIRType_Undefined: |
michael@0 | 1433 | loadConstantFloatingPoint(GenericNaN(), float(GenericNaN()), output, outputType); |
michael@0 | 1434 | break; |
michael@0 | 1435 | default: |
michael@0 | 1436 | MOZ_ASSUME_UNREACHABLE("Bad MIRType"); |
michael@0 | 1437 | } |
michael@0 | 1438 | } |
michael@0 | 1439 | |
michael@0 | 1440 | void |
michael@0 | 1441 | MacroAssembler::convertDoubleToInt(FloatRegister src, Register output, FloatRegister temp, |
michael@0 | 1442 | Label *truncateFail, Label *fail, |
michael@0 | 1443 | IntConversionBehavior behavior) |
michael@0 | 1444 | { |
michael@0 | 1445 | switch (behavior) { |
michael@0 | 1446 | case IntConversion_Normal: |
michael@0 | 1447 | case IntConversion_NegativeZeroCheck: |
michael@0 | 1448 | convertDoubleToInt32(src, output, fail, behavior == IntConversion_NegativeZeroCheck); |
michael@0 | 1449 | break; |
michael@0 | 1450 | case IntConversion_Truncate: |
michael@0 | 1451 | branchTruncateDouble(src, output, truncateFail ? truncateFail : fail); |
michael@0 | 1452 | break; |
michael@0 | 1453 | case IntConversion_ClampToUint8: |
michael@0 | 1454 | // Clamping clobbers the input register, so use a temp. |
michael@0 | 1455 | moveDouble(src, temp); |
michael@0 | 1456 | clampDoubleToUint8(temp, output); |
michael@0 | 1457 | break; |
michael@0 | 1458 | } |
michael@0 | 1459 | } |
michael@0 | 1460 | |
michael@0 | 1461 | void |
michael@0 | 1462 | MacroAssembler::convertValueToInt(ValueOperand value, MDefinition *maybeInput, |
michael@0 | 1463 | Label *handleStringEntry, Label *handleStringRejoin, |
michael@0 | 1464 | Label *truncateDoubleSlow, |
michael@0 | 1465 | Register stringReg, FloatRegister temp, Register output, |
michael@0 | 1466 | Label *fail, IntConversionBehavior behavior, |
michael@0 | 1467 | IntConversionInputKind conversion) |
michael@0 | 1468 | { |
michael@0 | 1469 | Register tag = splitTagForTest(value); |
michael@0 | 1470 | bool handleStrings = (behavior == IntConversion_Truncate || |
michael@0 | 1471 | behavior == IntConversion_ClampToUint8) && |
michael@0 | 1472 | handleStringEntry && |
michael@0 | 1473 | handleStringRejoin; |
michael@0 | 1474 | |
michael@0 | 1475 | JS_ASSERT_IF(handleStrings, conversion == IntConversion_Any); |
michael@0 | 1476 | |
michael@0 | 1477 | Label done, isInt32, isBool, isDouble, isNull, isString; |
michael@0 | 1478 | |
michael@0 | 1479 | branchEqualTypeIfNeeded(MIRType_Int32, maybeInput, tag, &isInt32); |
michael@0 | 1480 | if (conversion == IntConversion_Any || conversion == IntConversion_NumbersOrBoolsOnly) |
michael@0 | 1481 | branchEqualTypeIfNeeded(MIRType_Boolean, maybeInput, tag, &isBool); |
michael@0 | 1482 | branchEqualTypeIfNeeded(MIRType_Double, maybeInput, tag, &isDouble); |
michael@0 | 1483 | |
michael@0 | 1484 | if (conversion == IntConversion_Any) { |
michael@0 | 1485 | // If we are not truncating, we fail for anything that's not |
michael@0 | 1486 | // null. Otherwise we might be able to handle strings and objects. |
michael@0 | 1487 | switch (behavior) { |
michael@0 | 1488 | case IntConversion_Normal: |
michael@0 | 1489 | case IntConversion_NegativeZeroCheck: |
michael@0 | 1490 | branchTestNull(Assembler::NotEqual, tag, fail); |
michael@0 | 1491 | break; |
michael@0 | 1492 | |
michael@0 | 1493 | case IntConversion_Truncate: |
michael@0 | 1494 | case IntConversion_ClampToUint8: |
michael@0 | 1495 | branchEqualTypeIfNeeded(MIRType_Null, maybeInput, tag, &isNull); |
michael@0 | 1496 | if (handleStrings) |
michael@0 | 1497 | branchEqualTypeIfNeeded(MIRType_String, maybeInput, tag, &isString); |
michael@0 | 1498 | branchEqualTypeIfNeeded(MIRType_Object, maybeInput, tag, fail); |
michael@0 | 1499 | branchTestUndefined(Assembler::NotEqual, tag, fail); |
michael@0 | 1500 | break; |
michael@0 | 1501 | } |
michael@0 | 1502 | } else { |
michael@0 | 1503 | jump(fail); |
michael@0 | 1504 | } |
michael@0 | 1505 | |
michael@0 | 1506 | // The value is null or undefined in truncation contexts - just emit 0. |
michael@0 | 1507 | if (isNull.used()) |
michael@0 | 1508 | bind(&isNull); |
michael@0 | 1509 | mov(ImmWord(0), output); |
michael@0 | 1510 | jump(&done); |
michael@0 | 1511 | |
michael@0 | 1512 | // Try converting a string into a double, then jump to the double case. |
michael@0 | 1513 | if (handleStrings) { |
michael@0 | 1514 | bind(&isString); |
michael@0 | 1515 | unboxString(value, stringReg); |
michael@0 | 1516 | jump(handleStringEntry); |
michael@0 | 1517 | } |
michael@0 | 1518 | |
michael@0 | 1519 | // Try converting double into integer. |
michael@0 | 1520 | if (isDouble.used() || handleStrings) { |
michael@0 | 1521 | if (isDouble.used()) { |
michael@0 | 1522 | bind(&isDouble); |
michael@0 | 1523 | unboxDouble(value, temp); |
michael@0 | 1524 | } |
michael@0 | 1525 | |
michael@0 | 1526 | if (handleStrings) |
michael@0 | 1527 | bind(handleStringRejoin); |
michael@0 | 1528 | |
michael@0 | 1529 | convertDoubleToInt(temp, output, temp, truncateDoubleSlow, fail, behavior); |
michael@0 | 1530 | jump(&done); |
michael@0 | 1531 | } |
michael@0 | 1532 | |
michael@0 | 1533 | // Just unbox a bool, the result is 0 or 1. |
michael@0 | 1534 | if (isBool.used()) { |
michael@0 | 1535 | bind(&isBool); |
michael@0 | 1536 | unboxBoolean(value, output); |
michael@0 | 1537 | jump(&done); |
michael@0 | 1538 | } |
michael@0 | 1539 | |
michael@0 | 1540 | // Integers can be unboxed. |
michael@0 | 1541 | if (isInt32.used()) { |
michael@0 | 1542 | bind(&isInt32); |
michael@0 | 1543 | unboxInt32(value, output); |
michael@0 | 1544 | if (behavior == IntConversion_ClampToUint8) |
michael@0 | 1545 | clampIntToUint8(output); |
michael@0 | 1546 | } |
michael@0 | 1547 | |
michael@0 | 1548 | bind(&done); |
michael@0 | 1549 | } |
michael@0 | 1550 | |
michael@0 | 1551 | bool |
michael@0 | 1552 | MacroAssembler::convertValueToInt(JSContext *cx, const Value &v, Register output, Label *fail, |
michael@0 | 1553 | IntConversionBehavior behavior) |
michael@0 | 1554 | { |
michael@0 | 1555 | bool handleStrings = (behavior == IntConversion_Truncate || |
michael@0 | 1556 | behavior == IntConversion_ClampToUint8); |
michael@0 | 1557 | |
michael@0 | 1558 | if (v.isNumber() || (handleStrings && v.isString())) { |
michael@0 | 1559 | double d; |
michael@0 | 1560 | if (v.isNumber()) |
michael@0 | 1561 | d = v.toNumber(); |
michael@0 | 1562 | else if (!StringToNumber(cx, v.toString(), &d)) |
michael@0 | 1563 | return false; |
michael@0 | 1564 | |
michael@0 | 1565 | switch (behavior) { |
michael@0 | 1566 | case IntConversion_Normal: |
michael@0 | 1567 | case IntConversion_NegativeZeroCheck: { |
michael@0 | 1568 | // -0 is checked anyways if we have a constant value. |
michael@0 | 1569 | int i; |
michael@0 | 1570 | if (mozilla::NumberIsInt32(d, &i)) |
michael@0 | 1571 | move32(Imm32(i), output); |
michael@0 | 1572 | else |
michael@0 | 1573 | jump(fail); |
michael@0 | 1574 | break; |
michael@0 | 1575 | } |
michael@0 | 1576 | case IntConversion_Truncate: |
michael@0 | 1577 | move32(Imm32(js::ToInt32(d)), output); |
michael@0 | 1578 | break; |
michael@0 | 1579 | case IntConversion_ClampToUint8: |
michael@0 | 1580 | move32(Imm32(ClampDoubleToUint8(d)), output); |
michael@0 | 1581 | break; |
michael@0 | 1582 | } |
michael@0 | 1583 | |
michael@0 | 1584 | return true; |
michael@0 | 1585 | } |
michael@0 | 1586 | |
michael@0 | 1587 | if (v.isBoolean()) { |
michael@0 | 1588 | move32(Imm32(v.toBoolean() ? 1 : 0), output); |
michael@0 | 1589 | return true; |
michael@0 | 1590 | } |
michael@0 | 1591 | |
michael@0 | 1592 | if (v.isNull() || v.isUndefined()) { |
michael@0 | 1593 | move32(Imm32(0), output); |
michael@0 | 1594 | return true; |
michael@0 | 1595 | } |
michael@0 | 1596 | |
michael@0 | 1597 | JS_ASSERT(v.isObject()); |
michael@0 | 1598 | |
michael@0 | 1599 | jump(fail); |
michael@0 | 1600 | return true; |
michael@0 | 1601 | } |
michael@0 | 1602 | |
michael@0 | 1603 | bool |
michael@0 | 1604 | MacroAssembler::convertConstantOrRegisterToInt(JSContext *cx, ConstantOrRegister src, |
michael@0 | 1605 | FloatRegister temp, Register output, |
michael@0 | 1606 | Label *fail, IntConversionBehavior behavior) |
michael@0 | 1607 | { |
michael@0 | 1608 | if (src.constant()) |
michael@0 | 1609 | return convertValueToInt(cx, src.value(), output, fail, behavior); |
michael@0 | 1610 | |
michael@0 | 1611 | convertTypedOrValueToInt(src.reg(), temp, output, fail, behavior); |
michael@0 | 1612 | return true; |
michael@0 | 1613 | } |
michael@0 | 1614 | |
michael@0 | 1615 | void |
michael@0 | 1616 | MacroAssembler::convertTypedOrValueToInt(TypedOrValueRegister src, FloatRegister temp, |
michael@0 | 1617 | Register output, Label *fail, |
michael@0 | 1618 | IntConversionBehavior behavior) |
michael@0 | 1619 | { |
michael@0 | 1620 | if (src.hasValue()) { |
michael@0 | 1621 | convertValueToInt(src.valueReg(), temp, output, fail, behavior); |
michael@0 | 1622 | return; |
michael@0 | 1623 | } |
michael@0 | 1624 | |
michael@0 | 1625 | switch (src.type()) { |
michael@0 | 1626 | case MIRType_Undefined: |
michael@0 | 1627 | case MIRType_Null: |
michael@0 | 1628 | move32(Imm32(0), output); |
michael@0 | 1629 | break; |
michael@0 | 1630 | case MIRType_Boolean: |
michael@0 | 1631 | case MIRType_Int32: |
michael@0 | 1632 | if (src.typedReg().gpr() != output) |
michael@0 | 1633 | move32(src.typedReg().gpr(), output); |
michael@0 | 1634 | if (src.type() == MIRType_Int32 && behavior == IntConversion_ClampToUint8) |
michael@0 | 1635 | clampIntToUint8(output); |
michael@0 | 1636 | break; |
michael@0 | 1637 | case MIRType_Double: |
michael@0 | 1638 | convertDoubleToInt(src.typedReg().fpu(), output, temp, nullptr, fail, behavior); |
michael@0 | 1639 | break; |
michael@0 | 1640 | case MIRType_Float32: |
michael@0 | 1641 | // Conversion to Double simplifies implementation at the expense of performance. |
michael@0 | 1642 | convertFloat32ToDouble(src.typedReg().fpu(), temp); |
michael@0 | 1643 | convertDoubleToInt(temp, output, temp, nullptr, fail, behavior); |
michael@0 | 1644 | break; |
michael@0 | 1645 | case MIRType_String: |
michael@0 | 1646 | case MIRType_Object: |
michael@0 | 1647 | jump(fail); |
michael@0 | 1648 | break; |
michael@0 | 1649 | default: |
michael@0 | 1650 | MOZ_ASSUME_UNREACHABLE("Bad MIRType"); |
michael@0 | 1651 | } |
michael@0 | 1652 | } |
michael@0 | 1653 | |
michael@0 | 1654 | void |
michael@0 | 1655 | MacroAssembler::finish() |
michael@0 | 1656 | { |
michael@0 | 1657 | if (sequentialFailureLabel_.used()) { |
michael@0 | 1658 | bind(&sequentialFailureLabel_); |
michael@0 | 1659 | handleFailure(SequentialExecution); |
michael@0 | 1660 | } |
michael@0 | 1661 | if (parallelFailureLabel_.used()) { |
michael@0 | 1662 | bind(¶llelFailureLabel_); |
michael@0 | 1663 | handleFailure(ParallelExecution); |
michael@0 | 1664 | } |
michael@0 | 1665 | |
michael@0 | 1666 | MacroAssemblerSpecific::finish(); |
michael@0 | 1667 | } |
michael@0 | 1668 | |
michael@0 | 1669 | void |
michael@0 | 1670 | MacroAssembler::branchIfNotInterpretedConstructor(Register fun, Register scratch, Label *label) |
michael@0 | 1671 | { |
michael@0 | 1672 | // 16-bit loads are slow and unaligned 32-bit loads may be too so |
michael@0 | 1673 | // perform an aligned 32-bit load and adjust the bitmask accordingly. |
michael@0 | 1674 | JS_ASSERT(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0); |
michael@0 | 1675 | JS_ASSERT(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2); |
michael@0 | 1676 | JS_STATIC_ASSERT(IS_LITTLE_ENDIAN); |
michael@0 | 1677 | |
michael@0 | 1678 | // Emit code for the following test: |
michael@0 | 1679 | // |
michael@0 | 1680 | // bool isInterpretedConstructor() const { |
michael@0 | 1681 | // return isInterpreted() && !isFunctionPrototype() && !isArrow() && |
michael@0 | 1682 | // (!isSelfHostedBuiltin() || isSelfHostedConstructor()); |
michael@0 | 1683 | // } |
michael@0 | 1684 | |
michael@0 | 1685 | // First, ensure it's a scripted function. |
michael@0 | 1686 | load32(Address(fun, JSFunction::offsetOfNargs()), scratch); |
michael@0 | 1687 | branchTest32(Assembler::Zero, scratch, Imm32(JSFunction::INTERPRETED << 16), label); |
michael@0 | 1688 | |
michael@0 | 1689 | // Common case: if IS_FUN_PROTO, ARROW and SELF_HOSTED are not set, |
michael@0 | 1690 | // the function is an interpreted constructor and we're done. |
michael@0 | 1691 | Label done; |
michael@0 | 1692 | uint32_t bits = (JSFunction::IS_FUN_PROTO | JSFunction::ARROW | JSFunction::SELF_HOSTED) << 16; |
michael@0 | 1693 | branchTest32(Assembler::Zero, scratch, Imm32(bits), &done); |
michael@0 | 1694 | { |
michael@0 | 1695 | // The callee is either Function.prototype, an arrow function or |
michael@0 | 1696 | // self-hosted. None of these are constructible, except self-hosted |
michael@0 | 1697 | // constructors, so branch to |label| if SELF_HOSTED_CTOR is not set. |
michael@0 | 1698 | branchTest32(Assembler::Zero, scratch, Imm32(JSFunction::SELF_HOSTED_CTOR << 16), label); |
michael@0 | 1699 | |
michael@0 | 1700 | #ifdef DEBUG |
michael@0 | 1701 | // Function.prototype should not have the SELF_HOSTED_CTOR flag. |
michael@0 | 1702 | branchTest32(Assembler::Zero, scratch, Imm32(JSFunction::IS_FUN_PROTO << 16), &done); |
michael@0 | 1703 | breakpoint(); |
michael@0 | 1704 | #endif |
michael@0 | 1705 | } |
michael@0 | 1706 | bind(&done); |
michael@0 | 1707 | } |
michael@0 | 1708 | |
michael@0 | 1709 | void |
michael@0 | 1710 | MacroAssembler::branchEqualTypeIfNeeded(MIRType type, MDefinition *maybeDef, Register tag, |
michael@0 | 1711 | Label *label) |
michael@0 | 1712 | { |
michael@0 | 1713 | if (!maybeDef || maybeDef->mightBeType(type)) { |
michael@0 | 1714 | switch (type) { |
michael@0 | 1715 | case MIRType_Null: |
michael@0 | 1716 | branchTestNull(Equal, tag, label); |
michael@0 | 1717 | break; |
michael@0 | 1718 | case MIRType_Boolean: |
michael@0 | 1719 | branchTestBoolean(Equal, tag, label); |
michael@0 | 1720 | break; |
michael@0 | 1721 | case MIRType_Int32: |
michael@0 | 1722 | branchTestInt32(Equal, tag, label); |
michael@0 | 1723 | break; |
michael@0 | 1724 | case MIRType_Double: |
michael@0 | 1725 | branchTestDouble(Equal, tag, label); |
michael@0 | 1726 | break; |
michael@0 | 1727 | case MIRType_String: |
michael@0 | 1728 | branchTestString(Equal, tag, label); |
michael@0 | 1729 | break; |
michael@0 | 1730 | case MIRType_Object: |
michael@0 | 1731 | branchTestObject(Equal, tag, label); |
michael@0 | 1732 | break; |
michael@0 | 1733 | default: |
michael@0 | 1734 | MOZ_ASSUME_UNREACHABLE("Unsupported type"); |
michael@0 | 1735 | } |
michael@0 | 1736 | } |
michael@0 | 1737 | } |
michael@0 | 1738 | |
michael@0 | 1739 | |
michael@0 | 1740 | // If a pseudostack frame has this as its label, its stack pointer |
michael@0 | 1741 | // field points to the registers saved on entry to JIT code. A native |
michael@0 | 1742 | // stack unwinder could use that information to continue unwinding |
michael@0 | 1743 | // past that point. |
michael@0 | 1744 | const char MacroAssembler::enterJitLabel[] = "EnterJIT"; |
michael@0 | 1745 | |
michael@0 | 1746 | // Creates an enterJIT pseudostack frame, as described above. Pushes |
michael@0 | 1747 | // a word to the stack to indicate whether this was done. |framePtr| is |
michael@0 | 1748 | // the pointer to the machine-dependent saved state. |
michael@0 | 1749 | void |
michael@0 | 1750 | MacroAssembler::spsMarkJit(SPSProfiler *p, Register framePtr, Register temp) |
michael@0 | 1751 | { |
michael@0 | 1752 | Label spsNotEnabled; |
michael@0 | 1753 | uint32_t *enabledAddr = p->addressOfEnabled(); |
michael@0 | 1754 | load32(AbsoluteAddress(enabledAddr), temp); |
michael@0 | 1755 | push(temp); // +4: Did we push an sps frame. |
michael@0 | 1756 | branchTest32(Assembler::Equal, temp, temp, &spsNotEnabled); |
michael@0 | 1757 | |
michael@0 | 1758 | Label stackFull; |
michael@0 | 1759 | // We always need the "safe" versions, because these are used in trampolines |
michael@0 | 1760 | // and won't be regenerated when SPS state changes. |
michael@0 | 1761 | spsProfileEntryAddressSafe(p, 0, temp, &stackFull); |
michael@0 | 1762 | |
michael@0 | 1763 | storePtr(ImmPtr(enterJitLabel), Address(temp, ProfileEntry::offsetOfString())); |
michael@0 | 1764 | storePtr(framePtr, Address(temp, ProfileEntry::offsetOfStackAddress())); |
michael@0 | 1765 | storePtr(ImmWord(uintptr_t(0)), Address(temp, ProfileEntry::offsetOfScript())); |
michael@0 | 1766 | store32(Imm32(ProfileEntry::NullPCIndex), Address(temp, ProfileEntry::offsetOfPCIdx())); |
michael@0 | 1767 | |
michael@0 | 1768 | /* Always increment the stack size, whether or not we actually pushed. */ |
michael@0 | 1769 | bind(&stackFull); |
michael@0 | 1770 | loadPtr(AbsoluteAddress(p->addressOfSizePointer()), temp); |
michael@0 | 1771 | add32(Imm32(1), Address(temp, 0)); |
michael@0 | 1772 | |
michael@0 | 1773 | bind(&spsNotEnabled); |
michael@0 | 1774 | } |
michael@0 | 1775 | |
michael@0 | 1776 | // Pops the word pushed by spsMarkJit and, if spsMarkJit pushed an SPS |
michael@0 | 1777 | // frame, pops it. |
michael@0 | 1778 | void |
michael@0 | 1779 | MacroAssembler::spsUnmarkJit(SPSProfiler *p, Register temp) |
michael@0 | 1780 | { |
michael@0 | 1781 | Label spsNotEnabled; |
michael@0 | 1782 | pop(temp); // -4: Was the profiler enabled. |
michael@0 | 1783 | branchTest32(Assembler::Equal, temp, temp, &spsNotEnabled); |
michael@0 | 1784 | |
michael@0 | 1785 | spsPopFrameSafe(p, temp); |
michael@0 | 1786 | |
michael@0 | 1787 | bind(&spsNotEnabled); |
michael@0 | 1788 | } |