--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/js/src/jit/shared/Assembler-shared.h	Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,912 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jit_shared_Assembler_shared_h
#define jit_shared_Assembler_shared_h

#include "mozilla/PodOperations.h"

#include <limits.h>

#include "jsworkers.h"

#include "jit/IonAllocPolicy.h"
#include "jit/Registers.h"
#include "jit/RegisterSets.h"

#if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_ARM)
// JS_SMALL_BRANCH means the range on a branch instruction
// is smaller than the whole address space.
# define JS_SMALL_BRANCH
#endif

namespace js {
namespace jit {

enum Scale {
    TimesOne = 0,
    TimesTwo = 1,
    TimesFour = 2,
    TimesEight = 3
};

static inline unsigned
ScaleToShift(Scale scale)
{
    return unsigned(scale);
}

static inline bool
IsShiftInScaleRange(int i)
{
    return i >= TimesOne && i <= TimesEight;
}

static inline Scale
ShiftToScale(int i)
{
    JS_ASSERT(IsShiftInScaleRange(i));
    return Scale(i);
}

static inline Scale
ScaleFromElemWidth(int shift)
{
    switch (shift) {
      case 1:
        return TimesOne;
      case 2:
        return TimesTwo;
      case 4:
        return TimesFour;
      case 8:
        return TimesEight;
    }

    MOZ_ASSUME_UNREACHABLE("Invalid scale");
}

// Used for 32-bit immediates which do not require relocation.
struct Imm32
{
    int32_t value;

    explicit Imm32(int32_t value) : value(value)
    { }

    static inline Imm32 ShiftOf(enum Scale s) {
        switch (s) {
          case TimesOne:
            return Imm32(0);
          case TimesTwo:
            return Imm32(1);
          case TimesFour:
            return Imm32(2);
          case TimesEight:
            return Imm32(3);
        };
        MOZ_ASSUME_UNREACHABLE("Invalid scale");
    }

    static inline Imm32 FactorOf(enum Scale s) {
        return Imm32(1 << ShiftOf(s).value);
    }
};
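
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): a minimal sketch of
// how Scale, ScaleToShift and the Imm32 helpers relate for a 4-byte element,
// using only the definitions above.
//
//   Scale s = ScaleFromElemWidth(sizeof(int32_t));  // 4-byte elements => TimesFour
//   MOZ_ASSERT(ScaleToShift(s) == 2);               // index is shifted left by 2
//   MOZ_ASSERT(Imm32::ShiftOf(s).value == 2);       // the same shift, as an Imm32
//   MOZ_ASSERT(Imm32::FactorOf(s).value == 4);      // 1 << shift == element width
// ---------------------------------------------------------------------------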

// Pointer-sized integer to be embedded as an immediate in an instruction.
struct ImmWord
{
    uintptr_t value;

    explicit ImmWord(uintptr_t value) : value(value)
    { }
};

#ifdef DEBUG
static inline bool
IsCompilingAsmJS()
{
    // asm.js compilation pushes an IonContext with a null JSCompartment.
    IonContext *ictx = MaybeGetIonContext();
    return ictx && ictx->compartment == nullptr;
}
#endif

// Pointer to be embedded as an immediate in an instruction.
struct ImmPtr
{
    void *value;

    explicit ImmPtr(const void *value) : value(const_cast<void*>(value))
    {
        // To make code serialization-safe, asm.js compilation should only
        // compile pointer immediates using AsmJSImmPtr.
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R>
    explicit ImmPtr(R (*pf)())
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1>
    explicit ImmPtr(R (*pf)(A1))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1, class A2>
    explicit ImmPtr(R (*pf)(A1, A2))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1, class A2, class A3>
    explicit ImmPtr(R (*pf)(A1, A2, A3))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1, class A2, class A3, class A4>
    explicit ImmPtr(R (*pf)(A1, A2, A3, A4))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }
};

// The same as ImmPtr except that the intention is to patch this
// instruction. The initial value of the immediate is 'addr' and this value is
// either clobbered or used in the patching process.
struct PatchedImmPtr {
    void *value;

    explicit PatchedImmPtr()
      : value(nullptr)
    { }
    explicit PatchedImmPtr(const void *value)
      : value(const_cast<void*>(value))
    { }
};

// Used for immediates which require relocation.
struct ImmGCPtr
{
    uintptr_t value;

    explicit ImmGCPtr(const gc::Cell *ptr) : value(reinterpret_cast<uintptr_t>(ptr))
    {
        JS_ASSERT(!IsPoisonedPtr(ptr));
        JS_ASSERT_IF(ptr, ptr->isTenured());

        // asm.js shouldn't be creating GC things
        JS_ASSERT(!IsCompilingAsmJS());
    }

  protected:
    ImmGCPtr() : value(0) {}
};

// Used for immediates which require relocation and may be traced during minor GC.
struct ImmMaybeNurseryPtr : public ImmGCPtr
{
    explicit ImmMaybeNurseryPtr(gc::Cell *ptr)
    {
        this->value = reinterpret_cast<uintptr_t>(ptr);
        JS_ASSERT(!IsPoisonedPtr(ptr));

        // asm.js shouldn't be creating GC things
        JS_ASSERT(!IsCompilingAsmJS());
    }
};

// Pointer to be embedded as an immediate that is loaded/stored from by an
// instruction.
struct AbsoluteAddress {
    void *addr;

    explicit AbsoluteAddress(const void *addr)
      : addr(const_cast<void*>(addr))
    {
        // asm.js shouldn't be creating GC things
        JS_ASSERT(!IsCompilingAsmJS());
    }

    AbsoluteAddress offset(ptrdiff_t delta) {
        return AbsoluteAddress(((uint8_t *) addr) + delta);
    }
};
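
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): AbsoluteAddress is
// plain pointer arithmetic; offset() just rebases the address by a byte delta.
// 'someCounter' is a hypothetical global used only for the example.
//
//   uint32_t someCounter[2];
//   AbsoluteAddress lo(&someCounter[0]);
//   AbsoluteAddress hi = lo.offset(sizeof(uint32_t));  // addr == &someCounter[1]
// ---------------------------------------------------------------------------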

// The same as AbsoluteAddress except that the intention is to patch this
// instruction. The initial value of the immediate is 'addr' and this value is
// either clobbered or used in the patching process.
struct PatchedAbsoluteAddress {
    void *addr;

    explicit PatchedAbsoluteAddress()
      : addr(nullptr)
    { }
    explicit PatchedAbsoluteAddress(const void *addr)
      : addr(const_cast<void*>(addr))
    { }
};

// Specifies an address computed in the form of a register base and a constant,
// 32-bit offset.
struct Address
{
    Register base;
    int32_t offset;

    Address(Register base, int32_t offset) : base(base), offset(offset)
    { }

    Address() { mozilla::PodZero(this); }
};

// Specifies an address computed in the form of a register base, a register
// index with a scale, and a constant, 32-bit offset.
struct BaseIndex
{
    Register base;
    Register index;
    Scale scale;
    int32_t offset;

    BaseIndex(Register base, Register index, Scale scale, int32_t offset = 0)
      : base(base), index(index), scale(scale), offset(offset)
    { }

    BaseIndex() { mozilla::PodZero(this); }
};
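
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): the effective
// address denoted by a BaseIndex is base + (index << scale) + offset.
// 'elemsReg' and 'idxReg' are hypothetical registers used only for the example.
//
//   // elemsReg points at an int32_t array and idxReg holds the element index:
//   BaseIndex elemAddr(elemsReg, idxReg, TimesFour);
//   // i.e. address = elemsReg + (idxReg << 2) + 0, one int32_t per index step.
// ---------------------------------------------------------------------------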

class Relocation {
  public:
    enum Kind {
        // The target is immovable, so patching is only needed if the source
        // buffer is relocated and the reference is relative.
        HARDCODED,

        // The target is the start of a JitCode buffer, which must be traced
        // during garbage collection. Relocations and patching may be needed.
        JITCODE
    };
};

struct LabelBase
{
  protected:
    // offset_ >= 0 means that the label is either bound or has incoming
    // uses and needs to be bound.
    int32_t offset_ : 31;
    bool bound_ : 1;

    // Disallow assignment.
    void operator =(const LabelBase &label);

  public:
    static const int32_t INVALID_OFFSET = -1;

    LabelBase() : offset_(INVALID_OFFSET), bound_(false)
    { }
    LabelBase(const LabelBase &label)
      : offset_(label.offset_),
        bound_(label.bound_)
    { }

    // If the label is bound, all incoming edges have been patched and any
    // future incoming edges will be immediately patched.
    bool bound() const {
        return bound_;
    }
    int32_t offset() const {
        JS_ASSERT(bound() || used());
        return offset_;
    }
    // Returns whether the label is not bound, but has incoming uses.
    bool used() const {
        return !bound() && offset_ > INVALID_OFFSET;
    }
    // Binds the label, fixing its final position in the code stream.
    void bind(int32_t offset) {
        JS_ASSERT(!bound());
        offset_ = offset;
        bound_ = true;
        JS_ASSERT(offset_ == offset);
    }
    // Marks the label as neither bound nor used.
    void reset() {
        offset_ = INVALID_OFFSET;
        bound_ = false;
    }
    // Sets the label's latest used position, returning the old use position in
    // the process.
    int32_t use(int32_t offset) {
        JS_ASSERT(!bound());

        int32_t old = offset_;
        offset_ = offset;
        JS_ASSERT(offset_ == offset);

        return old;
    }
};

// A label represents a position in an assembly buffer that may or may not have
// already been generated. A label can be either "bound" or "unbound": the
// former means its position is known, the latter that it is not yet known.
//
// A jump to an unbound label adds that jump to the label's incoming queue. A
// jump to a bound label automatically computes the jump distance. The process
// of binding a label automatically corrects all incoming jumps.
class Label : public LabelBase
{
  public:
    Label()
    { }
    Label(const Label &label) : LabelBase(label)
    { }
    ~Label()
    {
#ifdef DEBUG
        // The assertion below doesn't hold if an error occurred.
        if (OOM_counter > OOM_maxAllocations)
            return;
        if (MaybeGetIonContext() && GetIonContext()->runtime->hadOutOfMemory())
            return;

        MOZ_ASSERT(!used());
#endif
    }
};

// Label's destructor asserts that if it has been used it has also been bound.
// In the case of long-lived labels, however, failed compilation (e.g. OOM)
// will trigger this failure innocuously. This Label silences the assertion.
class NonAssertingLabel : public Label
{
  public:
    ~NonAssertingLabel()
    {
#ifdef DEBUG
        if (used())
            bind(0);
#endif
    }
};
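
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): the use()/bind()
// protocol from LabelBase above. Assemblers thread the previous use offset
// through the code stream so that bind() can later walk and patch every
// incoming jump. The offsets below are made up for the example.
//
//   Label done;
//   int32_t prev = done.use(16);   // first jump at offset 16; prev == -1 (none)
//   prev = done.use(32);           // second jump at offset 32; prev == 16
//   MOZ_ASSERT(done.used() && !done.bound());
//   done.bind(64);                 // label lands at offset 64; incoming jumps
//   MOZ_ASSERT(done.offset() == 64);  // are patched by the assembler
// ---------------------------------------------------------------------------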

class RepatchLabel
{
    static const int32_t INVALID_OFFSET = 0xC0000000;
    int32_t offset_ : 31;
    uint32_t bound_ : 1;

  public:
    RepatchLabel() : offset_(INVALID_OFFSET), bound_(0) {}

    void use(uint32_t newOffset) {
        JS_ASSERT(offset_ == INVALID_OFFSET);
        JS_ASSERT(newOffset != (uint32_t)INVALID_OFFSET);
        offset_ = newOffset;
    }
    bool bound() const {
        return bound_;
    }
    void bind(int32_t dest) {
        JS_ASSERT(!bound_);
        JS_ASSERT(dest != INVALID_OFFSET);
        offset_ = dest;
        bound_ = true;
    }
    int32_t target() {
        JS_ASSERT(bound());
        int32_t ret = offset_;
        offset_ = INVALID_OFFSET;
        return ret;
    }
    int32_t offset() {
        JS_ASSERT(!bound());
        return offset_;
    }
    bool used() const {
        return !bound() && offset_ != (INVALID_OFFSET);
    }
};

// An absolute label is like a Label, except it represents an absolute
// reference rather than a relative one. Thus, it cannot be patched until after
// linking.
struct AbsoluteLabel : public LabelBase
{
  public:
    AbsoluteLabel()
    { }
    AbsoluteLabel(const AbsoluteLabel &label) : LabelBase(label)
    { }
    int32_t prev() const {
        JS_ASSERT(!bound());
        if (!used())
            return INVALID_OFFSET;
        return offset();
    }
    void setPrev(int32_t offset) {
        use(offset);
    }
    void bind() {
        bound_ = true;

        // These labels cannot be used after being bound.
        offset_ = -1;
    }
};

// A code label contains an absolute reference to a point in the code. Thus, it
// cannot be patched until after linking.
class CodeLabel
{
    // The destination position, where the absolute reference should get
    // patched into.
    AbsoluteLabel dest_;

    // The source label (relative): the position in the code that the
    // destination should be patched to point at.
    Label src_;

  public:
    CodeLabel()
    { }
    CodeLabel(const AbsoluteLabel &dest)
      : dest_(dest)
    { }
    AbsoluteLabel *dest() {
        return &dest_;
    }
    Label *src() {
        return &src_;
    }
};

// Location of a jump or label in a generated JitCode block, relative to the
// start of the block.
class CodeOffsetJump
{
    size_t offset_;

#ifdef JS_SMALL_BRANCH
    size_t jumpTableIndex_;
#endif

  public:
#ifdef JS_SMALL_BRANCH
    CodeOffsetJump(size_t offset, size_t jumpTableIndex)
      : offset_(offset), jumpTableIndex_(jumpTableIndex)
    {}
    size_t jumpTableIndex() const {
        return jumpTableIndex_;
    }
#else
    CodeOffsetJump(size_t offset) : offset_(offset) {}
#endif

    CodeOffsetJump() {
        mozilla::PodZero(this);
    }

    size_t offset() const {
        return offset_;
    }
    void fixup(MacroAssembler *masm);
};

class CodeOffsetLabel
{
    size_t offset_;

  public:
    CodeOffsetLabel(size_t offset) : offset_(offset) {}
    CodeOffsetLabel() : offset_(0) {}

    size_t offset() const {
        return offset_;
    }
    void fixup(MacroAssembler *masm);
};

// Absolute location of a jump or a label in some generated JitCode block.
// Can also encode a CodeOffset{Jump,Label}, such that the offset is initially
// set and the absolute location later filled in after the final JitCode is
// allocated.
class CodeLocationJump
{
    uint8_t *raw_;
#ifdef DEBUG
    enum State { Uninitialized, Absolute, Relative };
    State state_;
    void setUninitialized() {
        state_ = Uninitialized;
    }
    void setAbsolute() {
        state_ = Absolute;
    }
    void setRelative() {
        state_ = Relative;
    }
#else
    void setUninitialized() const {
    }
    void setAbsolute() const {
    }
    void setRelative() const {
    }
#endif

#ifdef JS_SMALL_BRANCH
    uint8_t *jumpTableEntry_;
#endif

  public:
    CodeLocationJump() {
        raw_ = nullptr;
        setUninitialized();
#ifdef JS_SMALL_BRANCH
        jumpTableEntry_ = (uint8_t *) 0xdeadab1e;
#endif
    }
    CodeLocationJump(JitCode *code, CodeOffsetJump base) {
        *this = base;
        repoint(code);
    }

    void operator = (CodeOffsetJump base) {
        raw_ = (uint8_t *) base.offset();
        setRelative();
#ifdef JS_SMALL_BRANCH
        jumpTableEntry_ = (uint8_t *) base.jumpTableIndex();
#endif
    }

    void repoint(JitCode *code, MacroAssembler *masm = nullptr);

    uint8_t *raw() const {
        JS_ASSERT(state_ == Absolute);
        return raw_;
    }
    uint8_t *offset() const {
        JS_ASSERT(state_ == Relative);
        return raw_;
    }

#ifdef JS_SMALL_BRANCH
    uint8_t *jumpTableEntry() const {
        JS_ASSERT(state_ == Absolute);
        return jumpTableEntry_;
    }
#endif
};
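
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): the two-phase life
// of a CodeLocationJump, using only the definitions above. While code is still
// being assembled only a relative offset is known; once the final JitCode
// exists, repoint() turns it into an absolute pointer.
//
//   CodeOffsetJump off = ...;          // offset recorded while assembling
//   CodeLocationJump loc(code, off);   // stores the offset, then repoint(code)
//   uint8_t *target = loc.raw();       // absolute address inside 'code'
// ---------------------------------------------------------------------------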

class CodeLocationLabel
{
    uint8_t *raw_;
#ifdef DEBUG
    enum State { Uninitialized, Absolute, Relative };
    State state_;
    void setUninitialized() {
        state_ = Uninitialized;
    }
    void setAbsolute() {
        state_ = Absolute;
    }
    void setRelative() {
        state_ = Relative;
    }
#else
    void setUninitialized() const {
    }
    void setAbsolute() const {
    }
    void setRelative() const {
    }
#endif

  public:
    CodeLocationLabel() {
        raw_ = nullptr;
        setUninitialized();
    }
    CodeLocationLabel(JitCode *code, CodeOffsetLabel base) {
        *this = base;
        repoint(code);
    }
    CodeLocationLabel(JitCode *code) {
        raw_ = code->raw();
        setAbsolute();
    }
    CodeLocationLabel(uint8_t *raw) {
        raw_ = raw;
        setAbsolute();
    }

    void operator = (CodeOffsetLabel base) {
        raw_ = (uint8_t *)base.offset();
        setRelative();
    }
    ptrdiff_t operator - (const CodeLocationLabel &other) {
        return raw_ - other.raw_;
    }

    void repoint(JitCode *code, MacroAssembler *masm = nullptr);

#ifdef DEBUG
    bool isSet() const {
        return state_ != Uninitialized;
    }
#endif

    uint8_t *raw() const {
        JS_ASSERT(state_ == Absolute);
        return raw_;
    }
    uint8_t *offset() const {
        JS_ASSERT(state_ == Relative);
        return raw_;
    }
};

// Describes the user-visible properties of a callsite.
//
// A few general notes about the stack-walking supported by CallSite(Desc):
//  - This information facilitates stack-walking performed by FrameIter, which
//    is used by Error.stack and other user-visible stack-walking functions.
//  - Ion/asm.js calling conventions do not maintain a frame pointer, so
//    stack-walking must look up the stack depth based on the PC.
//  - Stack-walking only occurs from C++ after a synchronous call (JS-to-JS and
//    JS-to-C++). Thus, we do not need to map arbitrary PCs to stack depths,
//    just the return addresses at callsites.
//  - An exception to the above rule is the interrupt callback, which can
//    happen at arbitrary PCs. In such cases, we drop frames from the
//    stack-walk. In the future, when a full PC-to-stack-depth map is
//    maintained, we will handle this case.
class CallSiteDesc
{
    uint32_t line_;
    uint32_t column_;
    uint32_t functionNameIndex_;

    static const uint32_t sEntryTrampoline = UINT32_MAX;
    static const uint32_t sExit = UINT32_MAX - 1;

  public:
    static const uint32_t FUNCTION_NAME_INDEX_MAX = UINT32_MAX - 2;

    CallSiteDesc() {}

    CallSiteDesc(uint32_t line, uint32_t column, uint32_t functionNameIndex)
      : line_(line), column_(column), functionNameIndex_(functionNameIndex)
    {}

    static CallSiteDesc Entry() { return CallSiteDesc(0, 0, sEntryTrampoline); }
    static CallSiteDesc Exit() { return CallSiteDesc(0, 0, sExit); }

    bool isEntry() const { return functionNameIndex_ == sEntryTrampoline; }
    bool isExit() const { return functionNameIndex_ == sExit; }
    bool isNormal() const { return !(isEntry() || isExit()); }

    uint32_t line() const { JS_ASSERT(isNormal()); return line_; }
    uint32_t column() const { JS_ASSERT(isNormal()); return column_; }
    uint32_t functionNameIndex() const { JS_ASSERT(isNormal()); return functionNameIndex_; }
};

// Adds to CallSiteDesc the metadata necessary to walk the stack given an
// initial stack-pointer.
struct CallSite : public CallSiteDesc
{
    uint32_t returnAddressOffset_;
    uint32_t stackDepth_;

  public:
    CallSite() {}

    CallSite(CallSiteDesc desc, uint32_t returnAddressOffset, uint32_t stackDepth)
      : CallSiteDesc(desc),
        returnAddressOffset_(returnAddressOffset),
        stackDepth_(stackDepth)
    { }

    void setReturnAddressOffset(uint32_t r) { returnAddressOffset_ = r; }
    uint32_t returnAddressOffset() const { return returnAddressOffset_; }

    // The stackDepth measures the amount of stack space pushed since the
    // function was called. In particular, this includes the word pushed by the
    // call instruction on x86/x64.
    uint32_t stackDepth() const { JS_ASSERT(!isEntry()); return stackDepth_; }
};

typedef Vector<CallSite, 0, SystemAllocPolicy> CallSiteVector;
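
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): building the
// metadata for one "normal" callsite, using only the constructors above. The
// concrete numbers are made up for the example.
//
//   // A call at line 10, column 4, of the function with name-table index 3;
//   // its return address sits at code offset 0x40, and 24 bytes of stack have
//   // been pushed since the caller was entered.
//   CallSite site(CallSiteDesc(10, 4, 3), 0x40, 24);
//   MOZ_ASSERT(site.isNormal());
//   MOZ_ASSERT(site.stackDepth() == 24);
// ---------------------------------------------------------------------------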

// Summarizes a heap access made by asm.js code that needs to be patched later
// and/or looked up by the asm.js signal handlers. Different architectures need
// to know different things (x64: offset and length, ARM: where to patch in
// heap length, x86: where to patch in heap length and base), hence the massive
// #ifdefery.
class AsmJSHeapAccess
{
    uint32_t offset_;
#if defined(JS_CODEGEN_X86)
    uint8_t cmpDelta_;  // the number of bytes from the cmp to the load/store instruction
#endif
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    uint8_t opLength_;  // the length of the load/store instruction
    uint8_t isFloat32Load_;
    AnyRegister::Code loadedReg_ : 8;
#endif

    JS_STATIC_ASSERT(AnyRegister::Total < UINT8_MAX);

  public:
    AsmJSHeapAccess() {}
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    // If 'cmp' equals 'offset', or if it is not supplied, then cmpDelta_ is
    // zero, indicating that there is no length to patch.
    AsmJSHeapAccess(uint32_t offset, uint32_t after, ArrayBufferView::ViewType vt,
                    AnyRegister loadedReg, uint32_t cmp = UINT32_MAX)
      : offset_(offset),
# if defined(JS_CODEGEN_X86)
        cmpDelta_(cmp == UINT32_MAX ? 0 : offset - cmp),
# endif
        opLength_(after - offset),
        isFloat32Load_(vt == ArrayBufferView::TYPE_FLOAT32),
        loadedReg_(loadedReg.code())
    {}
    AsmJSHeapAccess(uint32_t offset, uint8_t after, uint32_t cmp = UINT32_MAX)
      : offset_(offset),
# if defined(JS_CODEGEN_X86)
        cmpDelta_(cmp == UINT32_MAX ? 0 : offset - cmp),
# endif
        opLength_(after - offset),
        isFloat32Load_(false),
        loadedReg_(UINT8_MAX)
    {}
#elif defined(JS_CODEGEN_ARM)
    explicit AsmJSHeapAccess(uint32_t offset)
      : offset_(offset)
    {}
#endif

    uint32_t offset() const { return offset_; }
    void setOffset(uint32_t offset) { offset_ = offset; }
#if defined(JS_CODEGEN_X86)
    bool hasLengthCheck() const { return cmpDelta_ > 0; }
    void *patchLengthAt(uint8_t *code) const { return code + (offset_ - cmpDelta_); }
    void *patchOffsetAt(uint8_t *code) const { return code + (offset_ + opLength_); }
#endif
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    unsigned opLength() const { return opLength_; }
    bool isLoad() const { return loadedReg_ != UINT8_MAX; }
    bool isFloat32Load() const { return isFloat32Load_; }
    AnyRegister loadedReg() const { return AnyRegister::FromCode(loadedReg_); }
#endif
};

typedef Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> AsmJSHeapAccessVector;
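
// ---------------------------------------------------------------------------
// Editor's illustration (not part of the original header): how the x86 fields
// map back to patchable code positions, using only the constructor and
// accessors above. The offsets are made up and 'viewReg' is a hypothetical
// register.
//
//   // A length-checked int32 load whose length-check patch point was recorded
//   // at code offset 0x10, with the load instruction occupying [0x16, 0x1c):
//   AsmJSHeapAccess access(0x16, 0x1c, ArrayBufferView::TYPE_INT32,
//                          AnyRegister(viewReg), 0x10);
//   // cmpDelta_ == 0x16 - 0x10 and opLength_ == 0x1c - 0x16, so on x86:
//   //   access.patchLengthAt(code) == code + 0x10   (the recorded cmp position)
//   //   access.patchOffsetAt(code) == code + 0x1c   (just after the load)
// ---------------------------------------------------------------------------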

struct AsmJSGlobalAccess
{
    CodeOffsetLabel patchAt;
    unsigned globalDataOffset;

    AsmJSGlobalAccess(CodeOffsetLabel patchAt, unsigned globalDataOffset)
      : patchAt(patchAt), globalDataOffset(globalDataOffset)
    {}
};

// Describes the intended pointee of an immediate to be embedded in asm.js
// code. By representing the pointee as a symbolic enum, the pointee can be
// patched after deserialization when the address of global things has changed.
enum AsmJSImmKind
{
    AsmJSImm_Runtime,
    AsmJSImm_StackLimit,
    AsmJSImm_ReportOverRecursed,
    AsmJSImm_HandleExecutionInterrupt,
    AsmJSImm_InvokeFromAsmJS_Ignore,
    AsmJSImm_InvokeFromAsmJS_ToInt32,
    AsmJSImm_InvokeFromAsmJS_ToNumber,
    AsmJSImm_CoerceInPlace_ToInt32,
    AsmJSImm_CoerceInPlace_ToNumber,
    AsmJSImm_ToInt32,
#if defined(JS_CODEGEN_ARM)
    AsmJSImm_aeabi_idivmod,
    AsmJSImm_aeabi_uidivmod,
#endif
    AsmJSImm_ModD,
    AsmJSImm_SinD,
    AsmJSImm_CosD,
    AsmJSImm_TanD,
    AsmJSImm_ASinD,
    AsmJSImm_ACosD,
    AsmJSImm_ATanD,
    AsmJSImm_CeilD,
    AsmJSImm_CeilF,
    AsmJSImm_FloorD,
    AsmJSImm_FloorF,
    AsmJSImm_ExpD,
    AsmJSImm_LogD,
    AsmJSImm_PowD,
    AsmJSImm_ATan2D,
#ifdef DEBUG
    AsmJSImm_AssumeUnreachable,
#endif
    AsmJSImm_Invalid
};

// Pointer to be embedded as an immediate in asm.js code.
class AsmJSImmPtr
{
    AsmJSImmKind kind_;
  public:
    AsmJSImmKind kind() const { return kind_; }
    AsmJSImmPtr(AsmJSImmKind kind) : kind_(kind) { JS_ASSERT(IsCompilingAsmJS()); }
    AsmJSImmPtr() {}
};

// Pointer to be embedded as an immediate that is loaded/stored from by an
// instruction in asm.js code.
class AsmJSAbsoluteAddress
{
    AsmJSImmKind kind_;
  public:
    AsmJSImmKind kind() const { return kind_; }
    AsmJSAbsoluteAddress(AsmJSImmKind kind) : kind_(kind) { JS_ASSERT(IsCompilingAsmJS()); }
    AsmJSAbsoluteAddress() {}
};

// Represents an instruction to be patched and the intended pointee. These
// links are accumulated in the MacroAssembler, but patching is done outside
// the MacroAssembler (in AsmJSModule::staticallyLink).
struct AsmJSAbsoluteLink
{
    AsmJSAbsoluteLink(CodeOffsetLabel patchAt, AsmJSImmKind target)
      : patchAt(patchAt), target(target) {}
    CodeOffsetLabel patchAt;
    AsmJSImmKind target;
};

// The base class of all Assemblers for all archs.
class AssemblerShared
{
    Vector<CallSite, 0, SystemAllocPolicy> callsites_;
    Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> asmJSHeapAccesses_;
    Vector<AsmJSGlobalAccess, 0, SystemAllocPolicy> asmJSGlobalAccesses_;
    Vector<AsmJSAbsoluteLink, 0, SystemAllocPolicy> asmJSAbsoluteLinks_;

  public:
    bool append(CallSite callsite) { return callsites_.append(callsite); }
    CallSiteVector &&extractCallSites() { return Move(callsites_); }

    bool append(AsmJSHeapAccess access) { return asmJSHeapAccesses_.append(access); }
    AsmJSHeapAccessVector &&extractAsmJSHeapAccesses() { return Move(asmJSHeapAccesses_); }

    bool append(AsmJSGlobalAccess access) { return asmJSGlobalAccesses_.append(access); }
    size_t numAsmJSGlobalAccesses() const { return asmJSGlobalAccesses_.length(); }
    AsmJSGlobalAccess asmJSGlobalAccess(size_t i) const { return asmJSGlobalAccesses_[i]; }

    bool append(AsmJSAbsoluteLink link) { return asmJSAbsoluteLinks_.append(link); }
    size_t numAsmJSAbsoluteLinks() const { return asmJSAbsoluteLinks_.length(); }
    AsmJSAbsoluteLink asmJSAbsoluteLink(size_t i) const { return asmJSAbsoluteLinks_[i]; }
};

} // namespace jit
} // namespace js

#endif /* jit_shared_Assembler_shared_h */