Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purposes.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jit_shared_Assembler_shared_h
#define jit_shared_Assembler_shared_h

#include "mozilla/PodOperations.h"

#include <limits.h>

#include "jsworkers.h"

#include "jit/IonAllocPolicy.h"
#include "jit/Registers.h"
#include "jit/RegisterSets.h"

#if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_ARM)
// JS_SMALL_BRANCH means the range on a branch instruction
// is smaller than the whole address space
# define JS_SMALL_BRANCH
#endif

namespace js {
namespace jit {

enum Scale {
    TimesOne = 0,
    TimesTwo = 1,
    TimesFour = 2,
    TimesEight = 3
};

static inline unsigned
ScaleToShift(Scale scale)
{
    return unsigned(scale);
}

static inline bool
IsShiftInScaleRange(int i)
{
    return i >= TimesOne && i <= TimesEight;
}

static inline Scale
ShiftToScale(int i)
{
    JS_ASSERT(IsShiftInScaleRange(i));
    return Scale(i);
}

static inline Scale
ScaleFromElemWidth(int shift)
{
    switch (shift) {
      case 1:
        return TimesOne;
      case 2:
        return TimesTwo;
      case 4:
        return TimesFour;
      case 8:
        return TimesEight;
    }

    MOZ_ASSUME_UNREACHABLE("Invalid scale");
}
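
// NOTE (added annotation, not in the upstream header): Scale is the shift
// amount applied to an index register, so the element width it encodes is
// 1 << scale. A minimal sketch of the relationships defined above, using only
// the functions in this file:
//
//     Scale s = ScaleFromElemWidth(sizeof(int32_t));  // TimesFour
//     unsigned shift = ScaleToShift(s);               // 2
//     JS_ASSERT((1u << shift) == sizeof(int32_t));
//     JS_ASSERT(ShiftToScale(2) == TimesFour);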

// Used for 32-bit immediates which do not require relocation.
struct Imm32
{
    int32_t value;

    explicit Imm32(int32_t value) : value(value)
    { }

    static inline Imm32 ShiftOf(enum Scale s) {
        switch (s) {
          case TimesOne:
            return Imm32(0);
          case TimesTwo:
            return Imm32(1);
          case TimesFour:
            return Imm32(2);
          case TimesEight:
            return Imm32(3);
        };
        MOZ_ASSUME_UNREACHABLE("Invalid scale");
    }

    static inline Imm32 FactorOf(enum Scale s) {
        return Imm32(1 << ShiftOf(s).value);
    }
};
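
// NOTE (added annotation, not in the upstream header): ShiftOf() wraps the
// shift count of a Scale in an Imm32 and FactorOf() wraps the corresponding
// element width, so for example:
//
//     Imm32::ShiftOf(TimesEight).value   == 3
//     Imm32::FactorOf(TimesEight).value  == 8   // 1 << 3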

// Pointer-sized integer to be embedded as an immediate in an instruction.
struct ImmWord
{
    uintptr_t value;

    explicit ImmWord(uintptr_t value) : value(value)
    { }
};

#ifdef DEBUG
static inline bool
IsCompilingAsmJS()
{
    // asm.js compilation pushes an IonContext with a null JSCompartment.
    IonContext *ictx = MaybeGetIonContext();
    return ictx && ictx->compartment == nullptr;
}
#endif

// Pointer to be embedded as an immediate in an instruction.
struct ImmPtr
{
    void *value;

    explicit ImmPtr(const void *value) : value(const_cast<void*>(value))
    {
        // To make code serialization-safe, asm.js compilation should only
        // compile pointer immediates using AsmJSImmPtr.
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R>
    explicit ImmPtr(R (*pf)())
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1>
    explicit ImmPtr(R (*pf)(A1))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1, class A2>
    explicit ImmPtr(R (*pf)(A1, A2))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1, class A2, class A3>
    explicit ImmPtr(R (*pf)(A1, A2, A3))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }

    template <class R, class A1, class A2, class A3, class A4>
    explicit ImmPtr(R (*pf)(A1, A2, A3, A4))
      : value(JS_FUNC_TO_DATA_PTR(void *, pf))
    {
        JS_ASSERT(!IsCompilingAsmJS());
    }
};

// The same as ImmPtr except that the intention is to patch this
// instruction. The initial value of the immediate is 'addr' and this value is
// either clobbered or used in the patching process.
struct PatchedImmPtr {
    void *value;

    explicit PatchedImmPtr()
      : value(nullptr)
    { }
    explicit PatchedImmPtr(const void *value)
      : value(const_cast<void*>(value))
    { }
};

// Used for immediates which require relocation.
struct ImmGCPtr
{
    uintptr_t value;

    explicit ImmGCPtr(const gc::Cell *ptr) : value(reinterpret_cast<uintptr_t>(ptr))
    {
        JS_ASSERT(!IsPoisonedPtr(ptr));
        JS_ASSERT_IF(ptr, ptr->isTenured());

        // asm.js shouldn't be creating GC things
        JS_ASSERT(!IsCompilingAsmJS());
    }

  protected:
    ImmGCPtr() : value(0) {}
};

// Used for immediates which require relocation and may be traced during minor GC.
struct ImmMaybeNurseryPtr : public ImmGCPtr
{
    explicit ImmMaybeNurseryPtr(gc::Cell *ptr)
    {
        this->value = reinterpret_cast<uintptr_t>(ptr);
        JS_ASSERT(!IsPoisonedPtr(ptr));

        // asm.js shouldn't be creating GC things
        JS_ASSERT(!IsCompilingAsmJS());
    }
};

// Pointer to be embedded as an immediate that is loaded/stored from by an
// instruction.
struct AbsoluteAddress {
    void *addr;

    explicit AbsoluteAddress(const void *addr)
      : addr(const_cast<void*>(addr))
    {
        // asm.js shouldn't be creating GC things
        JS_ASSERT(!IsCompilingAsmJS());
    }

    AbsoluteAddress offset(ptrdiff_t delta) {
        return AbsoluteAddress(((uint8_t *) addr) + delta);
    }
};

// The same as AbsoluteAddress except that the intention is to patch this
// instruction. The initial value of the immediate is 'addr' and this value is
// either clobbered or used in the patching process.
struct PatchedAbsoluteAddress {
    void *addr;

    explicit PatchedAbsoluteAddress()
      : addr(nullptr)
    { }
    explicit PatchedAbsoluteAddress(const void *addr)
      : addr(const_cast<void*>(addr))
    { }
};

// Specifies an address computed in the form of a register base and a constant,
// 32-bit offset.
struct Address
{
    Register base;
    int32_t offset;

    Address(Register base, int32_t offset) : base(base), offset(offset)
    { }

    Address() { mozilla::PodZero(this); }
};

// Specifies an address computed in the form of a register base, a register
// index with a scale, and a constant, 32-bit offset.
struct BaseIndex
{
    Register base;
    Register index;
    Scale scale;
    int32_t offset;

    BaseIndex(Register base, Register index, Scale scale, int32_t offset = 0)
      : base(base), index(index), scale(scale), offset(offset)
    { }

    BaseIndex() { mozilla::PodZero(this); }
};
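
// NOTE (added annotation, not in the upstream header): an Address denotes the
// effective address base + offset, and a BaseIndex denotes
// base + (index << scale) + offset. For example, addressing element i of an
// array of doubles whose first element lives at [obj + 16] could be described
// as follows (the register names 'obj' and 'i' are hypothetical):
//
//     Address   header(obj, 16);               // obj + 16
//     BaseIndex elem(obj, i, TimesEight, 16);  // obj + i*8 + 16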

class Relocation {
  public:
    enum Kind {
        // The target is immovable, so patching is only needed if the source
        // buffer is relocated and the reference is relative.
        HARDCODED,

        // The target is the start of a JitCode buffer, which must be traced
        // during garbage collection. Relocations and patching may be needed.
        JITCODE
    };
};

struct LabelBase
{
  protected:
    // offset_ >= 0 means that the label is either bound or has incoming
    // uses and needs to be bound.
    int32_t offset_ : 31;
    bool bound_ : 1;

    // Disallow assignment.
    void operator =(const LabelBase &label);
  public:
    static const int32_t INVALID_OFFSET = -1;

    LabelBase() : offset_(INVALID_OFFSET), bound_(false)
    { }
    LabelBase(const LabelBase &label)
      : offset_(label.offset_),
        bound_(label.bound_)
    { }

    // If the label is bound, all incoming edges have been patched and any
    // future incoming edges will be immediately patched.
    bool bound() const {
        return bound_;
    }
    int32_t offset() const {
        JS_ASSERT(bound() || used());
        return offset_;
    }
    // Returns whether the label is not bound, but has incoming uses.
    bool used() const {
        return !bound() && offset_ > INVALID_OFFSET;
    }
    // Binds the label, fixing its final position in the code stream.
    void bind(int32_t offset) {
        JS_ASSERT(!bound());
        offset_ = offset;
        bound_ = true;
        JS_ASSERT(offset_ == offset);
    }
    // Marks the label as neither bound nor used.
    void reset() {
        offset_ = INVALID_OFFSET;
        bound_ = false;
    }
    // Sets the label's latest used position, returning the old use position in
    // the process.
    int32_t use(int32_t offset) {
        JS_ASSERT(!bound());

        int32_t old = offset_;
        offset_ = offset;
        JS_ASSERT(offset_ == offset);

        return old;
    }
};
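
// NOTE (added annotation, not in the upstream header): a LabelBase moves
// through three states. A minimal sketch of the transitions, using only the
// methods above:
//
//     LabelBase l;                // unused: !l.bound() && !l.used()
//     int32_t prev = l.use(12);   // used:   prev == INVALID_OFFSET,
//                                 //         l.offset() == 12 (latest use)
//     l.bind(40);                 // bound:  l.bound(), l.offset() == 40
//
// Because use() returns the previous use position, the assembler can thread
// unresolved uses through the stored offsets and patch them all once bind()
// supplies the final position.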

// A label represents a position in an assembly buffer that may or may not have
// already been generated. A label can be either "bound" or "unbound": the
// former means that its position is known, while the latter means that its
// position is not yet known.
//
// A jump to an unbound label adds that jump to the label's incoming queue. A
// jump to a bound label automatically computes the jump distance. The process
// of binding a label automatically corrects all incoming jumps.
class Label : public LabelBase
{
  public:
    Label()
    { }
    Label(const Label &label) : LabelBase(label)
    { }
    ~Label()
    {
#ifdef DEBUG
        // The assertion below doesn't hold if an error occurred.
        if (OOM_counter > OOM_maxAllocations)
            return;
        if (MaybeGetIonContext() && GetIonContext()->runtime->hadOutOfMemory())
            return;

        MOZ_ASSERT(!used());
#endif
    }
};
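
// NOTE (added annotation, not in the upstream header): the typical use of a
// Label is to emit a forward branch before the target position is known and
// bind the label once it is. A hedged sketch, assuming a MacroAssembler 'masm'
// with the usual jump()/bind() entry points:
//
//     Label done;
//     masm.jump(&done);     // forward jump, recorded against the label
//     /* ... emit other code ... */
//     masm.bind(&done);     // fixes the position and patches pending jumps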

// Label's destructor asserts that if it has been used it has also been bound.
// In the case of long-lived labels, however, failed compilation (e.g. OOM)
// will trigger this failure innocuously. This Label silences the assertion.
class NonAssertingLabel : public Label
{
  public:
    ~NonAssertingLabel()
    {
#ifdef DEBUG
        if (used())
            bind(0);
#endif
    }
};

class RepatchLabel
{
    static const int32_t INVALID_OFFSET = 0xC0000000;
    int32_t offset_ : 31;
    uint32_t bound_ : 1;
  public:

    RepatchLabel() : offset_(INVALID_OFFSET), bound_(0) {}

    void use(uint32_t newOffset) {
        JS_ASSERT(offset_ == INVALID_OFFSET);
        JS_ASSERT(newOffset != (uint32_t)INVALID_OFFSET);
        offset_ = newOffset;
    }
    bool bound() const {
        return bound_;
    }
    void bind(int32_t dest) {
        JS_ASSERT(!bound_);
        JS_ASSERT(dest != INVALID_OFFSET);
        offset_ = dest;
        bound_ = true;
    }
    int32_t target() {
        JS_ASSERT(bound());
        int32_t ret = offset_;
        offset_ = INVALID_OFFSET;
        return ret;
    }
    int32_t offset() {
        JS_ASSERT(!bound());
        return offset_;
    }
    bool used() const {
        return !bound() && offset_ != (INVALID_OFFSET);
    }
};

// An absolute label is like a Label, except it represents an absolute
// reference rather than a relative one. Thus, it cannot be patched until after
// linking.
struct AbsoluteLabel : public LabelBase
{
  public:
    AbsoluteLabel()
    { }
    AbsoluteLabel(const AbsoluteLabel &label) : LabelBase(label)
    { }
    int32_t prev() const {
        JS_ASSERT(!bound());
        if (!used())
            return INVALID_OFFSET;
        return offset();
    }
    void setPrev(int32_t offset) {
        use(offset);
    }
    void bind() {
        bound_ = true;

        // These labels cannot be used after being bound.
        offset_ = -1;
    }
};

// A code label contains an absolute reference to a point in the code.
// Thus, it cannot be patched until after linking.
class CodeLabel
{
    // The destination position, into which the absolute reference should get
    // patched.
    AbsoluteLabel dest_;

    // The source label (relative) in the code, to whose position the
    // destination should get patched.
    Label src_;

  public:
    CodeLabel()
    { }
    CodeLabel(const AbsoluteLabel &dest)
      : dest_(dest)
    { }
    AbsoluteLabel *dest() {
        return &dest_;
    }
    Label *src() {
        return &src_;
    }
};

// Location of a jump or label in a generated JitCode block, relative to the
// start of the block.

class CodeOffsetJump
{
    size_t offset_;

#ifdef JS_SMALL_BRANCH
    size_t jumpTableIndex_;
#endif

  public:

#ifdef JS_SMALL_BRANCH
    CodeOffsetJump(size_t offset, size_t jumpTableIndex)
      : offset_(offset), jumpTableIndex_(jumpTableIndex)
    {}
    size_t jumpTableIndex() const {
        return jumpTableIndex_;
    }
#else
    CodeOffsetJump(size_t offset) : offset_(offset) {}
#endif

    CodeOffsetJump() {
        mozilla::PodZero(this);
    }

    size_t offset() const {
        return offset_;
    }
    void fixup(MacroAssembler *masm);
};

class CodeOffsetLabel
{
    size_t offset_;

  public:
    CodeOffsetLabel(size_t offset) : offset_(offset) {}
    CodeOffsetLabel() : offset_(0) {}

    size_t offset() const {
        return offset_;
    }
    void fixup(MacroAssembler *masm);
};

// Absolute location of a jump or a label in some generated JitCode block.
// Can also encode a CodeOffset{Jump,Label}, such that the offset is initially
// set and the absolute location later filled in after the final JitCode is
// allocated.

class CodeLocationJump
{
    uint8_t *raw_;
#ifdef DEBUG
    enum State { Uninitialized, Absolute, Relative };
    State state_;
    void setUninitialized() {
        state_ = Uninitialized;
    }
    void setAbsolute() {
        state_ = Absolute;
    }
    void setRelative() {
        state_ = Relative;
    }
#else
    void setUninitialized() const {
    }
    void setAbsolute() const {
    }
    void setRelative() const {
    }
#endif

#ifdef JS_SMALL_BRANCH
    uint8_t *jumpTableEntry_;
#endif

  public:
    CodeLocationJump() {
        raw_ = nullptr;
        setUninitialized();
#ifdef JS_SMALL_BRANCH
        jumpTableEntry_ = (uint8_t *) 0xdeadab1e;
#endif
    }
    CodeLocationJump(JitCode *code, CodeOffsetJump base) {
        *this = base;
        repoint(code);
    }

    void operator = (CodeOffsetJump base) {
        raw_ = (uint8_t *) base.offset();
        setRelative();
#ifdef JS_SMALL_BRANCH
        jumpTableEntry_ = (uint8_t *) base.jumpTableIndex();
#endif
    }

    void repoint(JitCode *code, MacroAssembler* masm = nullptr);

    uint8_t *raw() const {
        JS_ASSERT(state_ == Absolute);
        return raw_;
    }
    uint8_t *offset() const {
        JS_ASSERT(state_ == Relative);
        return raw_;
    }

#ifdef JS_SMALL_BRANCH
    uint8_t *jumpTableEntry() const {
        JS_ASSERT(state_ == Absolute);
        return jumpTableEntry_;
    }
#endif
};

class CodeLocationLabel
{
    uint8_t *raw_;
#ifdef DEBUG
    enum State { Uninitialized, Absolute, Relative };
    State state_;
    void setUninitialized() {
        state_ = Uninitialized;
    }
    void setAbsolute() {
        state_ = Absolute;
    }
    void setRelative() {
        state_ = Relative;
    }
#else
    void setUninitialized() const {
    }
    void setAbsolute() const {
    }
    void setRelative() const {
    }
#endif

  public:
    CodeLocationLabel() {
        raw_ = nullptr;
        setUninitialized();
    }
    CodeLocationLabel(JitCode *code, CodeOffsetLabel base) {
        *this = base;
        repoint(code);
    }
    CodeLocationLabel(JitCode *code) {
        raw_ = code->raw();
        setAbsolute();
    }
    CodeLocationLabel(uint8_t *raw) {
        raw_ = raw;
        setAbsolute();
    }

    void operator = (CodeOffsetLabel base) {
        raw_ = (uint8_t *)base.offset();
        setRelative();
    }
    ptrdiff_t operator - (const CodeLocationLabel &other) {
        return raw_ - other.raw_;
    }

    void repoint(JitCode *code, MacroAssembler *masm = nullptr);

#ifdef DEBUG
    bool isSet() const {
        return state_ != Uninitialized;
    }
#endif

    uint8_t *raw() const {
        JS_ASSERT(state_ == Absolute);
        return raw_;
    }
    uint8_t *offset() const {
        JS_ASSERT(state_ == Relative);
        return raw_;
    }
};
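
// NOTE (added annotation, not in the upstream header): the CodeOffset* classes
// record block-relative positions while code is still being assembled; the
// CodeLocation* classes hold the final absolute addresses once the JitCode
// buffer exists. A hedged sketch of the intended flow, assuming a finished
// JitCode 'code' and an offset recorded earlier by the assembler:
//
//     CodeOffsetLabel off(0x40);          // hypothetical recorded offset
//     CodeLocationLabel loc(code, off);   // operator= stores the relative
//                                         // offset; repoint() then turns it
//                                         // into an Absolute location within
//                                         // code's buffer (per the comment
//                                         // above)
//     uint8_t *target = loc.raw();        // only valid in the Absolute state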

// Describes the user-visible properties of a callsite.
//
// A few general notes about the stack-walking supported by CallSite(Desc):
//  - This information facilitates stack-walking performed by FrameIter which
//    is used by Error.stack and other user-visible stack-walking functions.
//  - Ion/asm.js calling conventions do not maintain a frame pointer, so
//    stack-walking must look up the stack depth based on the PC.
//  - Stack-walking only occurs from C++ after synchronous calls (JS-to-JS and
//    JS-to-C++). Thus, we do not need to map arbitrary PCs to stack depths,
//    just the return addresses at callsites.
//  - An exception to the above rule is the interrupt callback, which can
//    happen at arbitrary PCs. In such cases, we drop frames from the stack
//    walk. In the future, when a full PC->stack-depth map is maintained, we
//    will handle this case.
class CallSiteDesc
{
    uint32_t line_;
    uint32_t column_;
    uint32_t functionNameIndex_;

    static const uint32_t sEntryTrampoline = UINT32_MAX;
    static const uint32_t sExit = UINT32_MAX - 1;

  public:
    static const uint32_t FUNCTION_NAME_INDEX_MAX = UINT32_MAX - 2;

    CallSiteDesc() {}

    CallSiteDesc(uint32_t line, uint32_t column, uint32_t functionNameIndex)
      : line_(line), column_(column), functionNameIndex_(functionNameIndex)
    {}

    static CallSiteDesc Entry() { return CallSiteDesc(0, 0, sEntryTrampoline); }
    static CallSiteDesc Exit() { return CallSiteDesc(0, 0, sExit); }

    bool isEntry() const { return functionNameIndex_ == sEntryTrampoline; }
    bool isExit() const { return functionNameIndex_ == sExit; }
    bool isNormal() const { return !(isEntry() || isExit()); }

    uint32_t line() const { JS_ASSERT(isNormal()); return line_; }
    uint32_t column() const { JS_ASSERT(isNormal()); return column_; }
    uint32_t functionNameIndex() const { JS_ASSERT(isNormal()); return functionNameIndex_; }
};

// Adds to CallSiteDesc the metadata necessary to walk the stack given an
// initial stack-pointer.
struct CallSite : public CallSiteDesc
{
    uint32_t returnAddressOffset_;
    uint32_t stackDepth_;

  public:
    CallSite() {}

    CallSite(CallSiteDesc desc, uint32_t returnAddressOffset, uint32_t stackDepth)
      : CallSiteDesc(desc),
        returnAddressOffset_(returnAddressOffset),
        stackDepth_(stackDepth)
    { }

    void setReturnAddressOffset(uint32_t r) { returnAddressOffset_ = r; }
    uint32_t returnAddressOffset() const { return returnAddressOffset_; }

    // The stackDepth measures the amount of stack space pushed since the
    // function was called. In particular, this includes the word pushed by the
    // call instruction on x86/x64.
    uint32_t stackDepth() const { JS_ASSERT(!isEntry()); return stackDepth_; }
};
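
// NOTE (added annotation, not in the upstream header): a stack walker that
// knows the base of the compiled code can use these fields to identify a
// callsite and step past its frame. A hedged sketch of the return-address
// lookup ('codeBase' and 'site' are hypothetical names):
//
//     uint8_t *returnAddress = codeBase + site.returnAddressOffset();
//     // site.stackDepth() gives the number of bytes pushed since the function
//     // was entered, which lets the walker recover the frame's entry stack
//     // pointer from the stack pointer observed at the callsite.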

typedef Vector<CallSite, 0, SystemAllocPolicy> CallSiteVector;

// Summarizes a heap access made by asm.js code that needs to be patched later
// and/or looked up by the asm.js signal handlers. Different architectures need
// to know different things (x64: offset and length, ARM: where to patch in
// heap length, x86: where to patch in heap length and base) hence the massive
// #ifdefery.
class AsmJSHeapAccess
{
    uint32_t offset_;
#if defined(JS_CODEGEN_X86)
    uint8_t cmpDelta_;  // the number of bytes from the cmp to the load/store instruction
#endif
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    uint8_t opLength_;  // the length of the load/store instruction
    uint8_t isFloat32Load_;
    AnyRegister::Code loadedReg_ : 8;
#endif

    JS_STATIC_ASSERT(AnyRegister::Total < UINT8_MAX);

  public:
    AsmJSHeapAccess() {}
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    // If 'cmp' equals 'offset', or if it is not supplied, then cmpDelta_ is
    // zero, indicating that there is no length to patch.
    AsmJSHeapAccess(uint32_t offset, uint32_t after, ArrayBufferView::ViewType vt,
                    AnyRegister loadedReg, uint32_t cmp = UINT32_MAX)
      : offset_(offset),
# if defined(JS_CODEGEN_X86)
        cmpDelta_(cmp == UINT32_MAX ? 0 : offset - cmp),
# endif
        opLength_(after - offset),
        isFloat32Load_(vt == ArrayBufferView::TYPE_FLOAT32),
        loadedReg_(loadedReg.code())
    {}
    AsmJSHeapAccess(uint32_t offset, uint8_t after, uint32_t cmp = UINT32_MAX)
      : offset_(offset),
# if defined(JS_CODEGEN_X86)
        cmpDelta_(cmp == UINT32_MAX ? 0 : offset - cmp),
# endif
        opLength_(after - offset),
        isFloat32Load_(false),
        loadedReg_(UINT8_MAX)
    {}
#elif defined(JS_CODEGEN_ARM)
    explicit AsmJSHeapAccess(uint32_t offset)
      : offset_(offset)
    {}
#endif

    uint32_t offset() const { return offset_; }
    void setOffset(uint32_t offset) { offset_ = offset; }
#if defined(JS_CODEGEN_X86)
    bool hasLengthCheck() const { return cmpDelta_ > 0; }
    void *patchLengthAt(uint8_t *code) const { return code + (offset_ - cmpDelta_); }
    void *patchOffsetAt(uint8_t *code) const { return code + (offset_ + opLength_); }
#endif
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    unsigned opLength() const { return opLength_; }
    bool isLoad() const { return loadedReg_ != UINT8_MAX; }
    bool isFloat32Load() const { return isFloat32Load_; }
    AnyRegister loadedReg() const { return AnyRegister::FromCode(loadedReg_); }
#endif
};
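
// NOTE (added annotation, not in the upstream header): on x86 the two patch
// points are recovered purely by offset arithmetic over the generated code,
// matching the accessors above ('code' is a hypothetical pointer to the start
// of the compiled asm.js code):
//
//     if (access.hasLengthCheck()) {
//         void *lengthImm = access.patchLengthAt(code);  // code + offset - cmpDelta
//         /* the heap length is patched here, at the recorded cmp */
//     }
//     void *addrImm = access.patchOffsetAt(code);        // code + offset + opLength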

typedef Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> AsmJSHeapAccessVector;

struct AsmJSGlobalAccess
{
    CodeOffsetLabel patchAt;
    unsigned globalDataOffset;

    AsmJSGlobalAccess(CodeOffsetLabel patchAt, unsigned globalDataOffset)
      : patchAt(patchAt), globalDataOffset(globalDataOffset)
    {}
};

// Describes the intended pointee of an immediate to be embedded in asm.js
// code. By representing the pointee as a symbolic enum, the pointee can be
// patched after deserialization when the address of global things has changed.
enum AsmJSImmKind
{
    AsmJSImm_Runtime,
    AsmJSImm_StackLimit,
    AsmJSImm_ReportOverRecursed,
    AsmJSImm_HandleExecutionInterrupt,
    AsmJSImm_InvokeFromAsmJS_Ignore,
    AsmJSImm_InvokeFromAsmJS_ToInt32,
    AsmJSImm_InvokeFromAsmJS_ToNumber,
    AsmJSImm_CoerceInPlace_ToInt32,
    AsmJSImm_CoerceInPlace_ToNumber,
    AsmJSImm_ToInt32,
#if defined(JS_CODEGEN_ARM)
    AsmJSImm_aeabi_idivmod,
    AsmJSImm_aeabi_uidivmod,
#endif
    AsmJSImm_ModD,
    AsmJSImm_SinD,
    AsmJSImm_CosD,
    AsmJSImm_TanD,
    AsmJSImm_ASinD,
    AsmJSImm_ACosD,
    AsmJSImm_ATanD,
    AsmJSImm_CeilD,
    AsmJSImm_CeilF,
    AsmJSImm_FloorD,
    AsmJSImm_FloorF,
    AsmJSImm_ExpD,
    AsmJSImm_LogD,
    AsmJSImm_PowD,
    AsmJSImm_ATan2D,
#ifdef DEBUG
    AsmJSImm_AssumeUnreachable,
#endif
    AsmJSImm_Invalid
};

// Pointer to be embedded as an immediate in asm.js code.
class AsmJSImmPtr
{
    AsmJSImmKind kind_;
  public:
    AsmJSImmKind kind() const { return kind_; }
    AsmJSImmPtr(AsmJSImmKind kind) : kind_(kind) { JS_ASSERT(IsCompilingAsmJS()); }
    AsmJSImmPtr() {}
};

// Pointer to be embedded as an immediate that is loaded/stored from by an
// instruction in asm.js code.
class AsmJSAbsoluteAddress
{
    AsmJSImmKind kind_;
  public:
    AsmJSImmKind kind() const { return kind_; }
    AsmJSAbsoluteAddress(AsmJSImmKind kind) : kind_(kind) { JS_ASSERT(IsCompilingAsmJS()); }
    AsmJSAbsoluteAddress() {}
};

// Represents an instruction to be patched and the intended pointee. These
// links are accumulated in the MacroAssembler, but patching is done outside
// the MacroAssembler (in AsmJSModule::staticallyLink).
struct AsmJSAbsoluteLink
{
    AsmJSAbsoluteLink(CodeOffsetLabel patchAt, AsmJSImmKind target)
      : patchAt(patchAt), target(target) {}
    CodeOffsetLabel patchAt;
    AsmJSImmKind target;
};
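
// NOTE (added annotation, not in the upstream header): an asm.js callout is
// emitted against a symbolic AsmJSImmKind rather than a raw address, and an
// AsmJSAbsoluteLink remembers where in the code the real address must later be
// written. A hedged sketch of the record kept per patch site (the offset value
// is hypothetical):
//
//     AsmJSAbsoluteLink link(CodeOffsetLabel(0x120), AsmJSImm_StackLimit);
//     // Later, AsmJSModule::staticallyLink resolves AsmJSImm_StackLimit to a
//     // runtime address and patches the code at link.patchAt.offset().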

// The base class of all Assemblers for all archs.
class AssemblerShared
{
    Vector<CallSite, 0, SystemAllocPolicy> callsites_;
    Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> asmJSHeapAccesses_;
    Vector<AsmJSGlobalAccess, 0, SystemAllocPolicy> asmJSGlobalAccesses_;
    Vector<AsmJSAbsoluteLink, 0, SystemAllocPolicy> asmJSAbsoluteLinks_;

  public:
    bool append(CallSite callsite) { return callsites_.append(callsite); }
    CallSiteVector &&extractCallSites() { return Move(callsites_); }

    bool append(AsmJSHeapAccess access) { return asmJSHeapAccesses_.append(access); }
    AsmJSHeapAccessVector &&extractAsmJSHeapAccesses() { return Move(asmJSHeapAccesses_); }

    bool append(AsmJSGlobalAccess access) { return asmJSGlobalAccesses_.append(access); }
    size_t numAsmJSGlobalAccesses() const { return asmJSGlobalAccesses_.length(); }
    AsmJSGlobalAccess asmJSGlobalAccess(size_t i) const { return asmJSGlobalAccesses_[i]; }

    bool append(AsmJSAbsoluteLink link) { return asmJSAbsoluteLinks_.append(link); }
    size_t numAsmJSAbsoluteLinks() const { return asmJSAbsoluteLinks_.length(); }
    AsmJSAbsoluteLink asmJSAbsoluteLink(size_t i) const { return asmJSAbsoluteLinks_[i]; }
};
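
// NOTE (added annotation, not in the upstream header): during code generation
// the assembler records metadata through the overloaded, fallible append()
// methods, and a consumer later drains or indexes it. A hedged sketch,
// assuming an AssemblerShared-derived assembler named 'asm_':
//
//     if (!asm_.append(CallSite(desc, returnAddressOffset, stackDepth)))
//         return false;                                    // propagate OOM
//     /* ... */
//     CallSiteVector callSites = asm_.extractCallSites();  // moves the vector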

} // namespace jit
} // namespace js

#endif /* jit_shared_Assembler_shared_h */