/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jit_shared_Assembler_x86_shared_h
#define jit_shared_Assembler_x86_shared_h

#include <cstddef>

#include "assembler/assembler/X86Assembler.h"
#include "jit/shared/Assembler-shared.h"

namespace js {
namespace jit {

class Operand
{
  public:
    enum Kind {
        REG,
        MEM_REG_DISP,
        FPREG,
        MEM_SCALE,
        MEM_ADDRESS32
    };

  private:
    Kind kind_ : 4;
    int32_t base_ : 5;
    Scale scale_ : 3;
    int32_t index_ : 5;
    int32_t disp_;

  public:
    explicit Operand(Register reg)
      : kind_(REG),
        base_(reg.code())
    { }
    explicit Operand(FloatRegister reg)
      : kind_(FPREG),
        base_(reg.code())
    { }
    explicit Operand(const Address &address)
      : kind_(MEM_REG_DISP),
        base_(address.base.code()),
        disp_(address.offset)
    { }
    explicit Operand(const BaseIndex &address)
      : kind_(MEM_SCALE),
        base_(address.base.code()),
        scale_(address.scale),
        index_(address.index.code()),
        disp_(address.offset)
    { }
    Operand(Register base, Register index, Scale scale, int32_t disp = 0)
      : kind_(MEM_SCALE),
        base_(base.code()),
        scale_(scale),
        index_(index.code()),
        disp_(disp)
    { }
    Operand(Register reg, int32_t disp)
      : kind_(MEM_REG_DISP),
        base_(reg.code()),
        disp_(disp)
    { }
    explicit Operand(const AbsoluteAddress &address)
      : kind_(MEM_ADDRESS32),
        disp_(JSC::X86Assembler::addressImmediate(address.addr))
    { }

    Address toAddress() const {
        JS_ASSERT(kind() == MEM_REG_DISP);
        return Address(Register::FromCode(base()), disp());
    }

    BaseIndex toBaseIndex() const {
        JS_ASSERT(kind() == MEM_SCALE);
        return BaseIndex(Register::FromCode(base()), Register::FromCode(index()), scale(), disp());
    }

    Kind kind() const {
        return kind_;
    }
    Registers::Code reg() const {
        JS_ASSERT(kind() == REG);
        return (Registers::Code)base_;
    }
    Registers::Code base() const {
        JS_ASSERT(kind() == MEM_REG_DISP || kind() == MEM_SCALE);
        return (Registers::Code)base_;
    }
    Registers::Code index() const {
        JS_ASSERT(kind() == MEM_SCALE);
        return (Registers::Code)index_;
    }
    Scale scale() const {
        JS_ASSERT(kind() == MEM_SCALE);
        return scale_;
    }
    FloatRegisters::Code fpu() const {
        JS_ASSERT(kind() == FPREG);
        return (FloatRegisters::Code)base_;
    }
    int32_t disp() const {
        JS_ASSERT(kind() == MEM_REG_DISP || kind() == MEM_SCALE);
        return disp_;
    }
    void *address() const {
        JS_ASSERT(kind() == MEM_ADDRESS32);
        return reinterpret_cast<void *>(disp_);
    }
};
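// Illustrative note (added; not in the original header): the constructors above
// map onto the usual x86 addressing modes. Assuming the per-architecture
// register definitions (e.g. ebx, ecx) and the Scale enumerators, a sketch:
//
//   Operand(ebx)                          // register operand: ebx
//   Operand(Address(ebx, 8))              // memory operand:   [ebx + 8]
//   Operand(ebx, ecx, TimesFour, 8)       // memory operand:   [ebx + ecx*4 + 8]
//   Operand(AbsoluteAddress(somePtr))     // memory operand:   [disp32]
//
// (somePtr is a hypothetical absolute address used only for illustration.)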

class AssemblerX86Shared : public AssemblerShared
{
  protected:
    struct RelativePatch {
        int32_t offset;
        void *target;
        Relocation::Kind kind;

        RelativePatch(int32_t offset, void *target, Relocation::Kind kind)
          : offset(offset),
            target(target),
            kind(kind)
        { }
    };

    Vector<CodeLabel, 0, SystemAllocPolicy> codeLabels_;
    Vector<RelativePatch, 8, SystemAllocPolicy> jumps_;
    CompactBufferWriter jumpRelocations_;
    CompactBufferWriter dataRelocations_;
    CompactBufferWriter preBarriers_;
    bool enoughMemory_;

    void writeDataRelocation(const Value &val) {
        if (val.isMarkable()) {
            JS_ASSERT(static_cast<gc::Cell*>(val.toGCThing())->isTenured());
            dataRelocations_.writeUnsigned(masm.currentOffset());
        }
    }
    void writeDataRelocation(const ImmGCPtr &ptr) {
        if (ptr.value)
            dataRelocations_.writeUnsigned(masm.currentOffset());
    }
    void writePrebarrierOffset(CodeOffsetLabel label) {
        preBarriers_.writeUnsigned(label.offset());
    }

  protected:
    JSC::X86Assembler masm;

    typedef JSC::X86Assembler::JmpSrc JmpSrc;
    typedef JSC::X86Assembler::JmpDst JmpDst;

  public:
    enum Condition {
        Equal = JSC::X86Assembler::ConditionE,
        NotEqual = JSC::X86Assembler::ConditionNE,
        Above = JSC::X86Assembler::ConditionA,
        AboveOrEqual = JSC::X86Assembler::ConditionAE,
        Below = JSC::X86Assembler::ConditionB,
        BelowOrEqual = JSC::X86Assembler::ConditionBE,
        GreaterThan = JSC::X86Assembler::ConditionG,
        GreaterThanOrEqual = JSC::X86Assembler::ConditionGE,
        LessThan = JSC::X86Assembler::ConditionL,
        LessThanOrEqual = JSC::X86Assembler::ConditionLE,
        Overflow = JSC::X86Assembler::ConditionO,
        Signed = JSC::X86Assembler::ConditionS,
        NotSigned = JSC::X86Assembler::ConditionNS,
        Zero = JSC::X86Assembler::ConditionE,
        NonZero = JSC::X86Assembler::ConditionNE,
        Parity = JSC::X86Assembler::ConditionP,
        NoParity = JSC::X86Assembler::ConditionNP
    };

    // If this bit is set, the ucomisd operands have to be inverted.
    static const int DoubleConditionBitInvert = 0x10;

    // Bit set when a DoubleCondition does not map to a single x86 condition.
    // The macro assembler has to special-case these conditions.
    static const int DoubleConditionBitSpecial = 0x20;
    static const int DoubleConditionBits = DoubleConditionBitInvert | DoubleConditionBitSpecial;

    enum DoubleCondition {
        // These conditions will only evaluate to true if the comparison is ordered - i.e. neither operand is NaN.
        DoubleOrdered = NoParity,
        DoubleEqual = Equal | DoubleConditionBitSpecial,
        DoubleNotEqual = NotEqual,
        DoubleGreaterThan = Above,
        DoubleGreaterThanOrEqual = AboveOrEqual,
        DoubleLessThan = Above | DoubleConditionBitInvert,
        DoubleLessThanOrEqual = AboveOrEqual | DoubleConditionBitInvert,
        // If either operand is NaN, these conditions always evaluate to true.
        DoubleUnordered = Parity,
        DoubleEqualOrUnordered = Equal,
        DoubleNotEqualOrUnordered = NotEqual | DoubleConditionBitSpecial,
        DoubleGreaterThanOrUnordered = Below | DoubleConditionBitInvert,
        DoubleGreaterThanOrEqualOrUnordered = BelowOrEqual | DoubleConditionBitInvert,
        DoubleLessThanOrUnordered = Below,
        DoubleLessThanOrEqualOrUnordered = BelowOrEqual
    };

    enum NaNCond {
        NaN_HandledByCond,
        NaN_IsTrue,
        NaN_IsFalse
    };

    // If the primary condition returned by ConditionFromDoubleCondition doesn't
    // handle NaNs properly, return NaN_IsFalse if the comparison should be
    // overridden to return false on NaN, NaN_IsTrue if it should be overridden
    // to return true on NaN, or NaN_HandledByCond if no secondary check is
    // needed.
    static inline NaNCond NaNCondFromDoubleCondition(DoubleCondition cond) {
        switch (cond) {
          case DoubleOrdered:
          case DoubleNotEqual:
          case DoubleGreaterThan:
          case DoubleGreaterThanOrEqual:
          case DoubleLessThan:
          case DoubleLessThanOrEqual:
          case DoubleUnordered:
          case DoubleEqualOrUnordered:
          case DoubleGreaterThanOrUnordered:
          case DoubleGreaterThanOrEqualOrUnordered:
          case DoubleLessThanOrUnordered:
          case DoubleLessThanOrEqualOrUnordered:
            return NaN_HandledByCond;
          case DoubleEqual:
            return NaN_IsFalse;
          case DoubleNotEqualOrUnordered:
            return NaN_IsTrue;
        }

        MOZ_ASSUME_UNREACHABLE("Unknown double condition");
    }

    static void staticAsserts() {
        // DoubleConditionBits should not interfere with x86 condition codes.
        JS_STATIC_ASSERT(!((Equal | NotEqual | Above | AboveOrEqual | Below |
                            BelowOrEqual | Parity | NoParity) & DoubleConditionBits));
    }

    AssemblerX86Shared()
      : enoughMemory_(true)
    {
    }

    static Condition InvertCondition(Condition cond);

    // Return the primary condition to test. Some primary conditions may not
    // handle NaNs properly and may therefore require a secondary condition.
    // Use NaNCondFromDoubleCondition to determine what else is needed.
    static inline Condition ConditionFromDoubleCondition(DoubleCondition cond) {
        return static_cast<Condition>(cond & ~DoubleConditionBits);
    }
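    // Illustrative sketch (added; not part of the original header): a macro
    // assembler consuming these helpers would typically emit something like
    // the following, where branchTrue/branchFalse stand in for whatever jump
    // emitters the caller actually has:
    //
    //   Condition cond = ConditionFromDoubleCondition(dcond);
    //   switch (NaNCondFromDoubleCondition(dcond)) {
    //     case NaN_IsTrue:  branchTrue(Parity); break;   // unordered => true
    //     case NaN_IsFalse: branchFalse(Parity); break;  // unordered => false
    //     case NaN_HandledByCond: break;                 // no extra check
    //   }
    //   branchTrue(cond);
    //
    // DoubleConditionBitInvert additionally tells the caller to swap the
    // ucomisd operands before testing |cond|.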

    static void TraceDataRelocations(JSTracer *trc, JitCode *code, CompactBufferReader &reader);

    // MacroAssemblers hold onto gcthings, so they are traced by the GC.
    void trace(JSTracer *trc);

    bool oom() const {
        return masm.oom() ||
               !enoughMemory_ ||
               jumpRelocations_.oom() ||
               dataRelocations_.oom() ||
               preBarriers_.oom();
    }

    void setPrinter(Sprinter *sp) {
        masm.setPrinter(sp);
    }

    void executableCopy(void *buffer);
    void processCodeLabels(uint8_t *rawCode);
    void copyJumpRelocationTable(uint8_t *dest);
    void copyDataRelocationTable(uint8_t *dest);
    void copyPreBarrierTable(uint8_t *dest);

    bool addCodeLabel(CodeLabel label) {
        return codeLabels_.append(label);
    }
    size_t numCodeLabels() const {
        return codeLabels_.length();
    }
    CodeLabel codeLabel(size_t i) {
        return codeLabels_[i];
    }

    // Size of the instruction stream, in bytes.
    size_t size() const {
        return masm.size();
    }
    // Size of the jump relocation table, in bytes.
    size_t jumpRelocationTableBytes() const {
        return jumpRelocations_.length();
    }
    size_t dataRelocationTableBytes() const {
        return dataRelocations_.length();
    }
    size_t preBarrierTableBytes() const {
        return preBarriers_.length();
    }
    // Total bytes needed for the instruction stream plus the jump, data, and
    // pre-barrier relocation tables.
    size_t bytesNeeded() const {
        return size() +
               jumpRelocationTableBytes() +
               dataRelocationTableBytes() +
               preBarrierTableBytes();
    }

  public:
    void align(int alignment) {
        masm.align(alignment);
    }
    void writeCodePointer(AbsoluteLabel *label) {
        JS_ASSERT(!label->bound());
        // Thread the patch list through the unpatched address word in the
        // instruction stream.
        masm.jumpTablePointer(label->prev());
        label->setPrev(masm.size());
    }
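    // Note added for exposition: the word emitted by jumpTablePointer() is not
    // a real address yet. Each unbound use stores the offset of the previous
    // use, so the unpatched words form a linked list threaded through the code
    // itself; Bind() below walks that list and writes the final address into
    // every slot.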
    void writeDoubleConstant(double d, Label *label) {
        label->bind(masm.size());
        masm.doubleConstant(d);
    }
    void writeFloatConstant(float f, Label *label) {
        label->bind(masm.size());
        masm.floatConstant(f);
    }
    void movl(const Imm32 &imm32, const Register &dest) {
        masm.movl_i32r(imm32.value, dest.code());
    }
    void movl(const Register &src, const Register &dest) {
        masm.movl_rr(src.code(), dest.code());
    }
    void movl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.movl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.movl_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.movl_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          case Operand::MEM_ADDRESS32:
            masm.movl_mr(src.address(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movl(const Register &src, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::REG:
            masm.movl_rr(src.code(), dest.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.movl_rm(src.code(), dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movl_rm(src.code(), dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          case Operand::MEM_ADDRESS32:
            masm.movl_rm(src.code(), dest.address());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movl(const Imm32 &imm32, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::REG:
            masm.movl_i32r(imm32.value, dest.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.movl_i32m(imm32.value, dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movl_i32m(imm32.value, dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }

    void xchgl(const Register &src, const Register &dest) {
        masm.xchgl_rr(src.code(), dest.code());
    }

    // Eventually movapd and movaps should be overloaded to support loads and
    // stores too.
    void movapd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.movapd_rr(src.code(), dest.code());
    }
    void movaps(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.movaps_rr(src.code(), dest.code());
    }

    // movsd and movss are only provided in load/store form since the
    // register-to-register form has different semantics (it doesn't clobber
    // the whole output register) and isn't needed currently.
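    // (Background note, added for clarity: in register-to-register form,
    // movsd/movss only writes the low 64/32 bits of the destination and
    // preserves the upper lanes, whereas the memory-load form zeroes the rest
    // of the destination register.)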
    void movsd(const Address &src, const FloatRegister &dest) {
        masm.movsd_mr(src.offset, src.base.code(), dest.code());
    }
    void movsd(const BaseIndex &src, const FloatRegister &dest) {
        masm.movsd_mr(src.offset, src.base.code(), src.index.code(), src.scale, dest.code());
    }
    void movsd(const FloatRegister &src, const Address &dest) {
        masm.movsd_rm(src.code(), dest.offset, dest.base.code());
    }
    void movsd(const FloatRegister &src, const BaseIndex &dest) {
        masm.movsd_rm(src.code(), dest.offset, dest.base.code(), dest.index.code(), dest.scale);
    }
    void movss(const Address &src, const FloatRegister &dest) {
        masm.movss_mr(src.offset, src.base.code(), dest.code());
    }
    void movss(const BaseIndex &src, const FloatRegister &dest) {
        masm.movss_mr(src.offset, src.base.code(), src.index.code(), src.scale, dest.code());
    }
    void movss(const FloatRegister &src, const Address &dest) {
        masm.movss_rm(src.code(), dest.offset, dest.base.code());
    }
    void movss(const FloatRegister &src, const BaseIndex &dest) {
        masm.movss_rm(src.code(), dest.offset, dest.base.code(), dest.index.code(), dest.scale);
    }
    void movdqa(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movdqa_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.movdqa_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movdqa(const FloatRegister &src, const Operand &dest) {
        JS_ASSERT(HasSSE2());
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movdqa_rm(src.code(), dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movdqa_rm(src.code(), dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cvtss2sd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.cvtss2sd_rr(src.code(), dest.code());
    }
    void cvtsd2ss(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.cvtsd2ss_rr(src.code(), dest.code());
    }
    void movzbl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movzbl_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.movzbl_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movsbl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movsbl_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.movsbl_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movb(const Register &src, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movb_rm(src.code(), dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movb_rm(src.code(), dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movb(const Imm32 &src, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movb_i8m(src.value, dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movb_i8m(src.value, dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movzwl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.movzwl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.movzwl_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.movzwl_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movzwl(const Register &src, const Register &dest) {
        masm.movzwl_rr(src.code(), dest.code());
    }
    void movw(const Register &src, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movw_rm(src.code(), dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movw_rm(src.code(), dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movw(const Imm32 &src, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movw_i16m(src.value, dest.disp(), dest.base());
            break;
          case Operand::MEM_SCALE:
            masm.movw_i16m(src.value, dest.disp(), dest.base(), dest.index(), dest.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void movswl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.movswl_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.movswl_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void leal(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.leal_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.leal_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }

  protected:
    JmpSrc jSrc(Condition cond, Label *label) {
        JmpSrc j = masm.jCC(static_cast<JSC::X86Assembler::Condition>(cond));
        if (label->bound()) {
            // The jump can be immediately patched to the correct destination.
            masm.linkJump(j, JmpDst(label->offset()));
        } else {
            // Thread the jump list through the unpatched jump targets.
            JmpSrc prev = JmpSrc(label->use(j.offset()));
            masm.setNextJump(j, prev);
        }
        return j;
    }
    JmpSrc jmpSrc(Label *label) {
        JmpSrc j = masm.jmp();
        if (label->bound()) {
            // The jump can be immediately patched to the correct destination.
            masm.linkJump(j, JmpDst(label->offset()));
        } else {
            // Thread the jump list through the unpatched jump targets.
            JmpSrc prev = JmpSrc(label->use(j.offset()));
            masm.setNextJump(j, prev);
        }
        return j;
    }

    // Comparison of EAX against the address given by a Label.
    JmpSrc cmpSrc(Label *label) {
        JmpSrc j = masm.cmp_eax();
        if (label->bound()) {
            // The jump can be immediately patched to the correct destination.
            masm.linkJump(j, JmpDst(label->offset()));
        } else {
            // Thread the jump list through the unpatched jump targets.
            JmpSrc prev = JmpSrc(label->use(j.offset()));
            masm.setNextJump(j, prev);
        }
        return j;
    }

    JmpSrc jSrc(Condition cond, RepatchLabel *label) {
        JmpSrc j = masm.jCC(static_cast<JSC::X86Assembler::Condition>(cond));
        if (label->bound()) {
            // The jump can be immediately patched to the correct destination.
            masm.linkJump(j, JmpDst(label->offset()));
        } else {
            label->use(j.offset());
        }
        return j;
    }
    JmpSrc jmpSrc(RepatchLabel *label) {
        JmpSrc j = masm.jmp();
        if (label->bound()) {
            // The jump can be immediately patched to the correct destination.
            masm.linkJump(j, JmpDst(label->offset()));
        } else {
            // Thread the jump list through the unpatched jump targets.
            label->use(j.offset());
        }
        return j;
    }

  public:
    void nop() { masm.nop(); }
    void j(Condition cond, Label *label) { jSrc(cond, label); }
    void jmp(Label *label) { jmpSrc(label); }
    void j(Condition cond, RepatchLabel *label) { jSrc(cond, label); }
    void jmp(RepatchLabel *label) { jmpSrc(label); }

    void jmp(const Operand &op) {
        switch (op.kind()) {
          case Operand::MEM_REG_DISP:
            masm.jmp_m(op.disp(), op.base());
            break;
          case Operand::MEM_SCALE:
            masm.jmp_m(op.disp(), op.base(), op.index(), op.scale());
            break;
          case Operand::REG:
            masm.jmp_r(op.reg());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cmpEAX(Label *label) { cmpSrc(label); }
    void bind(Label *label) {
        JSC::MacroAssembler::Label jsclabel;
        JSC::X86Assembler::JmpDst dst(masm.label());
        if (label->used()) {
            bool more;
            JSC::X86Assembler::JmpSrc jmp(label->offset());
            do {
                JSC::X86Assembler::JmpSrc next;
                more = masm.nextJump(jmp, &next);
                masm.linkJump(jmp, dst);
                jmp = next;
            } while (more);
        }
        label->bind(dst.offset());
    }
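    // Usage sketch (added; not in the original header): forward branches are
    // emitted against an unbound Label and resolved later:
    //
    //   Label done;
    //   j(Equal, &done);     // threaded onto |done|'s pending-jump list
    //   jmp(&done);          // also threaded
    //   ...
    //   bind(&done);         // walks the list and patches every jump here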
    void bind(RepatchLabel *label) {
        JSC::MacroAssembler::Label jsclabel;
        JSC::X86Assembler::JmpDst dst(masm.label());
        if (label->used()) {
            JSC::X86Assembler::JmpSrc jmp(label->offset());
            masm.linkJump(jmp, dst);
        }
        label->bind(dst.offset());
    }
    uint32_t currentOffset() {
        return masm.label().offset();
    }

    // Re-routes pending jumps to a new label.
    void retarget(Label *label, Label *target) {
        JSC::MacroAssembler::Label jsclabel;
        if (label->used()) {
            bool more;
            JSC::X86Assembler::JmpSrc jmp(label->offset());
            do {
                JSC::X86Assembler::JmpSrc next;
                more = masm.nextJump(jmp, &next);

                if (target->bound()) {
                    // The jump can be immediately patched to the correct destination.
                    masm.linkJump(jmp, JmpDst(target->offset()));
                } else {
                    // Thread the jump list through the unpatched jump targets.
                    JmpSrc prev = JmpSrc(target->use(jmp.offset()));
                    masm.setNextJump(jmp, prev);
                }

                jmp = next;
            } while (more);
        }
        label->reset();
    }

    static void Bind(uint8_t *raw, AbsoluteLabel *label, const void *address) {
        if (label->used()) {
            intptr_t src = label->offset();
            do {
                intptr_t next = reinterpret_cast<intptr_t>(JSC::X86Assembler::getPointer(raw + src));
                JSC::X86Assembler::setPointer(raw + src, address);
                src = next;
            } while (src != AbsoluteLabel::INVALID_OFFSET);
        }
        label->bind();
    }

    // See Bind and JSC::X86Assembler::setPointer.
    size_t labelOffsetToPatchOffset(size_t offset) {
        return offset - sizeof(void*);
    }
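    // (Clarifying note, added here: an AbsoluteLabel offset records the
    // position just past the embedded pointer word (see writeCodePointer and
    // Bind above), so converting it to the offset of the word itself means
    // backing up by sizeof(void*).)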

    void ret() {
        masm.ret();
    }
    void retn(Imm32 n) {
        // Remove the size of the return address which is included in the frame.
        masm.ret(n.value - sizeof(void *));
    }
    void call(Label *label) {
        if (label->bound()) {
            masm.linkJump(masm.call(), JmpDst(label->offset()));
        } else {
            JmpSrc j = masm.call();
            JmpSrc prev = JmpSrc(label->use(j.offset()));
            masm.setNextJump(j, prev);
        }
    }
    void call(const Register &reg) {
        masm.call(reg.code());
    }
    void call(const Operand &op) {
        switch (op.kind()) {
          case Operand::REG:
            masm.call(op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.call_m(op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }

    void breakpoint() {
        masm.int3();
    }

#ifdef DEBUG
    static bool HasSSE2() {
        return JSC::MacroAssembler::isSSE2Present();
    }
#endif
    static bool HasSSE3() {
        return JSC::MacroAssembler::isSSE3Present();
    }
    static bool HasSSE41() {
        return JSC::MacroAssembler::isSSE41Present();
    }

    // The cmpl methods below switch lhs and rhs when calling the underlying
    // assembler so that the emitted instruction follows the Intel convention.
    // When calling these methods, put the left operand on the left, as you
    // would expect.
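    // For example (illustrative, not from the original source):
    //
    //   cmpl(lhs, rhs);
    //   j(LessThan, &target);   // taken when lhs < rhs (signed compare)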
    void cmpl(const Register &lhs, const Register &rhs) {
        masm.cmpl_rr(rhs.code(), lhs.code());
    }
    void cmpl(const Register &lhs, const Operand &rhs) {
        switch (rhs.kind()) {
          case Operand::REG:
            masm.cmpl_rr(rhs.reg(), lhs.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.cmpl_mr(rhs.disp(), rhs.base(), lhs.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cmpl(const Register &src, Imm32 imm) {
        masm.cmpl_ir(imm.value, src.code());
    }
    void cmpl(const Operand &op, Imm32 imm) {
        switch (op.kind()) {
          case Operand::REG:
            masm.cmpl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.cmpl_im(imm.value, op.disp(), op.base());
            break;
          case Operand::MEM_SCALE:
            masm.cmpl_im(imm.value, op.disp(), op.base(), op.index(), op.scale());
            break;
          case Operand::MEM_ADDRESS32:
            masm.cmpl_im(imm.value, op.address());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cmpl(const Operand &lhs, const Register &rhs) {
        switch (lhs.kind()) {
          case Operand::REG:
            masm.cmpl_rr(rhs.code(), lhs.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.cmpl_rm(rhs.code(), lhs.disp(), lhs.base());
            break;
          case Operand::MEM_ADDRESS32:
            masm.cmpl_rm(rhs.code(), lhs.address());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cmpl(const Operand &op, ImmWord imm) {
        switch (op.kind()) {
          case Operand::REG:
            masm.cmpl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.cmpl_im(imm.value, op.disp(), op.base());
            break;
          case Operand::MEM_ADDRESS32:
            masm.cmpl_im(imm.value, op.address());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cmpl(const Operand &op, ImmPtr imm) {
        cmpl(op, ImmWord(uintptr_t(imm.value)));
    }
    void cmpw(const Register &lhs, const Register &rhs) {
        masm.cmpw_rr(lhs.code(), rhs.code());
    }
    void setCC(Condition cond, const Register &r) {
        masm.setCC_r(static_cast<JSC::X86Assembler::Condition>(cond), r.code());
    }
    void testb(const Register &lhs, const Register &rhs) {
        JS_ASSERT(GeneralRegisterSet(Registers::SingleByteRegs).has(lhs));
        JS_ASSERT(GeneralRegisterSet(Registers::SingleByteRegs).has(rhs));
        masm.testb_rr(rhs.code(), lhs.code());
    }
    void testw(const Register &lhs, const Register &rhs) {
        masm.testw_rr(rhs.code(), lhs.code());
    }
    void testl(const Register &lhs, const Register &rhs) {
        masm.testl_rr(rhs.code(), lhs.code());
    }
    void testl(const Register &lhs, Imm32 rhs) {
        masm.testl_i32r(rhs.value, lhs.code());
    }
    void testl(const Operand &lhs, Imm32 rhs) {
        switch (lhs.kind()) {
          case Operand::REG:
            masm.testl_i32r(rhs.value, lhs.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.testl_i32m(rhs.value, lhs.disp(), lhs.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
            break;
        }
    }

    void addl(Imm32 imm, const Register &dest) {
        masm.addl_ir(imm.value, dest.code());
    }
    void addl(Imm32 imm, const Operand &op) {
        switch (op.kind()) {
          case Operand::REG:
            masm.addl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.addl_im(imm.value, op.disp(), op.base());
            break;
          case Operand::MEM_ADDRESS32:
            masm.addl_im(imm.value, op.address());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void subl(Imm32 imm, const Register &dest) {
        masm.subl_ir(imm.value, dest.code());
    }
    void subl(Imm32 imm, const Operand &op) {
        switch (op.kind()) {
          case Operand::REG:
            masm.subl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.subl_im(imm.value, op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void addl(const Register &src, const Register &dest) {
        masm.addl_rr(src.code(), dest.code());
    }
    void subl(const Register &src, const Register &dest) {
        masm.subl_rr(src.code(), dest.code());
    }
    void subl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.subl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.subl_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void subl(const Register &src, const Operand &dest) {
        switch (dest.kind()) {
          case Operand::REG:
            masm.subl_rr(src.code(), dest.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.subl_rm(src.code(), dest.disp(), dest.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void orl(const Register &reg, const Register &dest) {
        masm.orl_rr(reg.code(), dest.code());
    }
    void orl(Imm32 imm, const Register &reg) {
        masm.orl_ir(imm.value, reg.code());
    }
    void orl(Imm32 imm, const Operand &op) {
        switch (op.kind()) {
          case Operand::REG:
            masm.orl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.orl_im(imm.value, op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void xorl(const Register &src, const Register &dest) {
        masm.xorl_rr(src.code(), dest.code());
    }
    void xorl(Imm32 imm, const Register &reg) {
        masm.xorl_ir(imm.value, reg.code());
    }
    void xorl(Imm32 imm, const Operand &op) {
        switch (op.kind()) {
          case Operand::REG:
            masm.xorl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.xorl_im(imm.value, op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void andl(const Register &src, const Register &dest) {
        masm.andl_rr(src.code(), dest.code());
    }
    void andl(Imm32 imm, const Register &dest) {
        masm.andl_ir(imm.value, dest.code());
    }
    void andl(Imm32 imm, const Operand &op) {
        switch (op.kind()) {
          case Operand::REG:
            masm.andl_ir(imm.value, op.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.andl_im(imm.value, op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void addl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.addl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.addl_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void orl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.orl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.orl_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void xorl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.xorl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.xorl_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void andl(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.andl_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.andl_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void imull(const Register &multiplier) {
        masm.imull_r(multiplier.code());
    }
    void imull(Imm32 imm, const Register &dest) {
        masm.imull_i32r(dest.code(), imm.value, dest.code());
    }
    void imull(const Register &src, const Register &dest) {
        masm.imull_rr(src.code(), dest.code());
    }
    void imull(Imm32 imm, const Register &src, const Register &dest) {
        masm.imull_i32r(src.code(), imm.value, dest.code());
    }
    void imull(const Operand &src, const Register &dest) {
        switch (src.kind()) {
          case Operand::REG:
            masm.imull_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.imull_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void negl(const Operand &src) {
        switch (src.kind()) {
          case Operand::REG:
            masm.negl_r(src.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.negl_m(src.disp(), src.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void negl(const Register &reg) {
        masm.negl_r(reg.code());
    }
    void notl(const Operand &src) {
        switch (src.kind()) {
          case Operand::REG:
            masm.notl_r(src.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.notl_m(src.disp(), src.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void notl(const Register &reg) {
        masm.notl_r(reg.code());
    }
    void shrl(const Imm32 imm, const Register &dest) {
        masm.shrl_i8r(imm.value, dest.code());
    }
    void shll(const Imm32 imm, const Register &dest) {
        masm.shll_i8r(imm.value, dest.code());
    }
    void sarl(const Imm32 imm, const Register &dest) {
        masm.sarl_i8r(imm.value, dest.code());
    }
    void shrl_cl(const Register &dest) {
        masm.shrl_CLr(dest.code());
    }
    void shll_cl(const Register &dest) {
        masm.shll_CLr(dest.code());
    }
    void sarl_cl(const Register &dest) {
        masm.sarl_CLr(dest.code());
    }

    void incl(const Operand &op) {
        switch (op.kind()) {
          case Operand::MEM_REG_DISP:
            masm.incl_m32(op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void lock_incl(const Operand &op) {
        masm.prefix_lock();
        incl(op);
    }

    void decl(const Operand &op) {
        switch (op.kind()) {
          case Operand::MEM_REG_DISP:
            masm.decl_m32(op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void lock_decl(const Operand &op) {
        masm.prefix_lock();
        decl(op);
    }

    void lock_cmpxchg32(const Register &src, const Operand &op) {
        masm.prefix_lock();
        switch (op.kind()) {
          case Operand::MEM_REG_DISP:
            masm.cmpxchg32(src.code(), op.disp(), op.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }

    void xaddl(const Register &srcdest, const Operand &mem) {
        switch (mem.kind()) {
          case Operand::MEM_REG_DISP:
            masm.xaddl_rm(srcdest.code(), mem.disp(), mem.base());
            break;
          case Operand::MEM_SCALE:
            masm.xaddl_rm(srcdest.code(), mem.disp(), mem.base(), mem.index(), mem.scale());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }

    void push(const Imm32 imm) {
        masm.push_i32(imm.value);
    }

    void push(const Operand &src) {
        switch (src.kind()) {
          case Operand::REG:
            masm.push_r(src.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.push_m(src.disp(), src.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void push(const Register &src) {
        masm.push_r(src.code());
    }
    void push(const Address &src) {
        masm.push_m(src.offset, src.base.code());
    }

    void pop(const Operand &src) {
        switch (src.kind()) {
          case Operand::REG:
            masm.pop_r(src.reg());
            break;
          case Operand::MEM_REG_DISP:
            masm.pop_m(src.disp(), src.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void pop(const Register &src) {
        masm.pop_r(src.code());
    }

    void pushFlags() {
        masm.push_flags();
    }
    void popFlags() {
        masm.pop_flags();
    }

#ifdef JS_CODEGEN_X86
    void pushAllRegs() {
        masm.pusha();
    }
    void popAllRegs() {
        masm.popa();
    }
#endif

    // Zero-extend byte to 32-bit integer.
    void movzbl(const Register &src, const Register &dest) {
        masm.movzbl_rr(src.code(), dest.code());
    }

    void cdq() {
        masm.cdq();
    }
    void idiv(Register divisor) {
        masm.idivl_r(divisor.code());
    }
    void udiv(Register divisor) {
        masm.divl_r(divisor.code());
    }

    void unpcklps(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.unpcklps_rr(src.code(), dest.code());
    }
    void pinsrd(const Register &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.pinsrd_rr(src.code(), dest.code());
    }
    void pinsrd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::REG:
            masm.pinsrd_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.pinsrd_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void psrldq(Imm32 shift, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.psrldq_ir(shift.value, dest.code());
    }
    void psllq(Imm32 shift, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.psllq_ir(shift.value, dest.code());
    }
    void psrlq(Imm32 shift, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.psrlq_ir(shift.value, dest.code());
    }

    void cvtsi2sd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::REG:
            masm.cvtsi2sd_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.cvtsi2sd_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.cvtsi2sd_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cvttsd2si(const FloatRegister &src, const Register &dest) {
        JS_ASSERT(HasSSE2());
        masm.cvttsd2si_rr(src.code(), dest.code());
    }
    void cvttss2si(const FloatRegister &src, const Register &dest) {
        JS_ASSERT(HasSSE2());
        masm.cvttss2si_rr(src.code(), dest.code());
    }
    void cvtsi2ss(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::REG:
            masm.cvtsi2ss_rr(src.reg(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.cvtsi2ss_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_SCALE:
            masm.cvtsi2ss_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void cvtsi2ss(const Register &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.cvtsi2ss_rr(src.code(), dest.code());
    }
    void cvtsi2sd(const Register &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.cvtsi2sd_rr(src.code(), dest.code());
    }
    void movmskpd(const FloatRegister &src, const Register &dest) {
        JS_ASSERT(HasSSE2());
        masm.movmskpd_rr(src.code(), dest.code());
    }
    void movmskps(const FloatRegister &src, const Register &dest) {
        JS_ASSERT(HasSSE2());
        masm.movmskps_rr(src.code(), dest.code());
    }
    void ptest(const FloatRegister &lhs, const FloatRegister &rhs) {
        JS_ASSERT(HasSSE41());
        masm.ptest_rr(rhs.code(), lhs.code());
    }
    void ucomisd(const FloatRegister &lhs, const FloatRegister &rhs) {
        JS_ASSERT(HasSSE2());
        masm.ucomisd_rr(rhs.code(), lhs.code());
    }
    void ucomiss(const FloatRegister &lhs, const FloatRegister &rhs) {
        JS_ASSERT(HasSSE2());
        masm.ucomiss_rr(rhs.code(), lhs.code());
    }
    void pcmpeqw(const FloatRegister &lhs, const FloatRegister &rhs) {
        JS_ASSERT(HasSSE2());
        masm.pcmpeqw_rr(rhs.code(), lhs.code());
    }
    void movd(const Register &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.movd_rr(src.code(), dest.code());
    }
    void movd(const FloatRegister &src, const Register &dest) {
        JS_ASSERT(HasSSE2());
        masm.movd_rr(src.code(), dest.code());
    }
    void addsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.addsd_rr(src.code(), dest.code());
    }
    void addss(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.addss_rr(src.code(), dest.code());
    }
    void addsd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.addsd_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.addsd_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_ADDRESS32:
            masm.addsd_mr(src.address(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void addss(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.addss_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.addss_mr(src.disp(), src.base(), dest.code());
            break;
          case Operand::MEM_ADDRESS32:
            masm.addss_mr(src.address(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void subsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.subsd_rr(src.code(), dest.code());
    }
    void subss(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.subss_rr(src.code(), dest.code());
    }
    void subsd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.subsd_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.subsd_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void subss(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.subss_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.subss_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void mulsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.mulsd_rr(src.code(), dest.code());
    }
    void mulsd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.mulsd_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.mulsd_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void mulss(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.mulss_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.mulss_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void mulss(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.mulss_rr(src.code(), dest.code());
    }
    void divsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.divsd_rr(src.code(), dest.code());
    }
    void divss(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.divss_rr(src.code(), dest.code());
    }
    void divsd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.divsd_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.divsd_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void divss(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.divss_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.divss_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void xorpd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.xorpd_rr(src.code(), dest.code());
    }
    void xorps(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.xorps_rr(src.code(), dest.code());
    }
    void orpd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.orpd_rr(src.code(), dest.code());
    }
    void andpd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.andpd_rr(src.code(), dest.code());
    }
    void andps(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.andps_rr(src.code(), dest.code());
    }
    void sqrtsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.sqrtsd_rr(src.code(), dest.code());
    }
    void sqrtss(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.sqrtss_rr(src.code(), dest.code());
    }
    void roundsd(const FloatRegister &src, const FloatRegister &dest,
                 JSC::X86Assembler::RoundingMode mode)
    {
        JS_ASSERT(HasSSE41());
        masm.roundsd_rr(src.code(), dest.code(), mode);
    }
    void roundss(const FloatRegister &src, const FloatRegister &dest,
                 JSC::X86Assembler::RoundingMode mode)
    {
        JS_ASSERT(HasSSE41());
        masm.roundss_rr(src.code(), dest.code(), mode);
    }
    void minsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.minsd_rr(src.code(), dest.code());
    }
    void minsd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.minsd_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.minsd_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void maxsd(const FloatRegister &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        masm.maxsd_rr(src.code(), dest.code());
    }
    void maxsd(const Operand &src, const FloatRegister &dest) {
        JS_ASSERT(HasSSE2());
        switch (src.kind()) {
          case Operand::FPREG:
            masm.maxsd_rr(src.fpu(), dest.code());
            break;
          case Operand::MEM_REG_DISP:
            masm.maxsd_mr(src.disp(), src.base(), dest.code());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void fisttp(const Operand &dest) {
        JS_ASSERT(HasSSE3());
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.fisttp_m(dest.disp(), dest.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void fld(const Operand &dest) {
        switch (dest.kind()) {
          case Operand::MEM_REG_DISP:
            masm.fld_m(dest.disp(), dest.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void fstp(const Operand &src) {
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.fstp_m(src.disp(), src.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }
    void fstp32(const Operand &src) {
        switch (src.kind()) {
          case Operand::MEM_REG_DISP:
            masm.fstp32_m(src.disp(), src.base());
            break;
          default:
            MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
        }
    }

    // Defined for compatibility with ARM's assembler
    uint32_t actualOffset(uint32_t x) {
        return x;
    }

    uint32_t actualIndex(uint32_t x) {
        return x;
    }

    void flushBuffer() {
    }

    // Patching.

    static size_t patchWrite_NearCallSize() {
        return 5;
    }
    static uintptr_t getPointer(uint8_t *instPtr) {
        uintptr_t *ptr = ((uintptr_t *) instPtr) - 1;
        return *ptr;
    }
    // Write a relative call at the start location |startLabel|.
    // Note that this DOES NOT patch data that comes before |startLabel|.
    static void patchWrite_NearCall(CodeLocationLabel startLabel, CodeLocationLabel target) {
        uint8_t *start = startLabel.raw();
        *start = 0xE8;
        ptrdiff_t offset = target - startLabel - patchWrite_NearCallSize();
        JS_ASSERT(int32_t(offset) == offset);
        *((int32_t *) (start + 1)) = offset;
    }
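    // (Exposition, added: 0xE8 is the opcode byte of the 32-bit relative CALL,
    // and the rel32 displacement that follows is measured from the end of the
    // 5-byte instruction, which is why patchWrite_NearCallSize() is subtracted
    // above.)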

    static void patchWrite_Imm32(CodeLocationLabel dataLabel, Imm32 toWrite) {
        *((int32_t *) dataLabel.raw() - 1) = toWrite.value;
    }

    static void patchDataWithValueCheck(CodeLocationLabel data, PatchedImmPtr newData,
                                        PatchedImmPtr expectedData) {
        // The pointer given is a pointer to *after* the data.
        uintptr_t *ptr = ((uintptr_t *) data.raw()) - 1;
        JS_ASSERT(*ptr == (uintptr_t)expectedData.value);
        *ptr = (uintptr_t)newData.value;
    }
    static void patchDataWithValueCheck(CodeLocationLabel data, ImmPtr newData, ImmPtr expectedData) {
        patchDataWithValueCheck(data, PatchedImmPtr(newData.value), PatchedImmPtr(expectedData.value));
    }
    static uint32_t nopSize() {
        return 1;
    }
    static uint8_t *nextInstruction(uint8_t *cur, uint32_t *count) {
        MOZ_ASSUME_UNREACHABLE("nextInstruction NYI on x86");
    }

    // Toggle a jmp or cmp emitted by toggledJump().
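    // (Added note: this works because CMP EAX, imm32 (opcode 0x3D), JMP rel32
    // (0xE9), and CALL rel32 (0xE8) are all 5-byte instructions with a 32-bit
    // operand, so toggling a patch site only has to rewrite the single opcode
    // byte.)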
    static void ToggleToJmp(CodeLocationLabel inst) {
        uint8_t *ptr = (uint8_t *)inst.raw();
        JS_ASSERT(*ptr == 0x3D);
        *ptr = 0xE9;
    }
    static void ToggleToCmp(CodeLocationLabel inst) {
        uint8_t *ptr = (uint8_t *)inst.raw();
        JS_ASSERT(*ptr == 0xE9);
        *ptr = 0x3D;
    }
    static void ToggleCall(CodeLocationLabel inst, bool enabled) {
        uint8_t *ptr = (uint8_t *)inst.raw();
        JS_ASSERT(*ptr == 0x3D || // CMP
                  *ptr == 0xE8);  // CALL
        *ptr = enabled ? 0xE8 : 0x3D;
    }
};

} // namespace jit
} // namespace js

#endif /* jit_shared_Assembler_x86_shared_h */