js/src/jit/arm/BaselineHelpers-arm.h

author:      Michael Schloh von Bennewitz <michael@schloh.com>
date:        Sat, 03 Jan 2015 20:18:00 +0100
branch:      TOR_BUG_3246
changeset:   7:129ffea94266
permissions: -rw-r--r--

Conditionally enable double key logic according to
private browsing mode or the privacy.thirdparty.isolate preference, and
implement it in GetCookieStringCommon and FindCookie where it counts...
With some reservations about how to convince FindCookie users to test the
condition and pass a nullptr when double key logic is disabled.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jit_arm_BaselineHelpers_arm_h
#define jit_arm_BaselineHelpers_arm_h

#ifdef JS_ION
#include "jit/BaselineFrame.h"
#include "jit/BaselineIC.h"
#include "jit/BaselineRegisters.h"
#include "jit/IonMacroAssembler.h"

namespace js {
namespace jit {

// Distance from sp to the top Value inside an IC stub (no return address on the stack on ARM).
static const size_t ICStackValueOffset = 0;

inline void
EmitRestoreTailCallReg(MacroAssembler &masm)
{
    // No-op on ARM because the link register always holds the return address.
}

inline void
EmitRepushTailCallReg(MacroAssembler &masm)
{
    // No-op on ARM because the link register always holds the return address.
}

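// EmitCallIC emits the fixed IC call sequence: a patchable move whose
// placeholder value (-1) is rewritten later (via *patchOffset) to locate the
// ICEntry, a load of that entry's first stub, a load of the stub's code
// pointer, and a branch-and-link to it. A minimal usage sketch from a
// hypothetical caller (illustration only, not part of this header):
//
//     CodeOffsetLabel patchOffset;
//     EmitCallIC(&patchOffset, masm);
//     // ... once the ICEntry's final location is known, patch the
//     // instruction recorded at patchOffset to point at it.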
inline void
EmitCallIC(CodeOffsetLabel *patchOffset, MacroAssembler &masm)
{
    // Move ICEntry offset into BaselineStubReg
    CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), BaselineStubReg);
    *patchOffset = offset;

    // Load stub pointer into BaselineStubReg
    masm.loadPtr(Address(BaselineStubReg, ICEntry::offsetOfFirstStub()), BaselineStubReg);

    // Load stubcode pointer from BaselineStubEntry.
    // R2 won't be active when we call ICs, so we can use r0.
    JS_ASSERT(R2 == ValueOperand(r1, r0));
    masm.loadPtr(Address(BaselineStubReg, ICStub::offsetOfStubCode()), r0);

    // Call the stubcode via a direct branch-and-link
    masm.ma_blx(r0);
}

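// EmitEnterTypeMonitorIC jumps (rather than calls) into the first type monitor
// stub of the current stub; the monitorStubOffset parameter exists so callers
// can point it at a first-monitor-stub field stored at a different offset.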
inline void
EmitEnterTypeMonitorIC(MacroAssembler &masm,
                       size_t monitorStubOffset = ICMonitoredStub::offsetOfFirstMonitorStub())
{
    // This is expected to be called from within an IC, when BaselineStubReg
    // is properly initialized to point to the stub.
    masm.loadPtr(Address(BaselineStubReg, (uint32_t) monitorStubOffset), BaselineStubReg);

    // Load stubcode pointer from BaselineStubEntry.
    // R2 won't be active when we call ICs, so we can use r0.
    JS_ASSERT(R2 == ValueOperand(r1, r0));
    masm.loadPtr(Address(BaselineStubReg, ICStub::offsetOfStubCode()), r0);

    // Jump to the stubcode.
    masm.branch(r0);
}

inline void
EmitReturnFromIC(MacroAssembler &masm)
{
    // Return to the caller by moving lr into pc.
    masm.ma_mov(lr, pc);
}

inline void
EmitChangeICReturnAddress(MacroAssembler &masm, Register reg)
{
    // Replace the return address held in lr.
    masm.ma_mov(reg, lr);
}

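// EmitTailCallVM tail-calls a VM wrapper: R0 and R1 are assumed to have been
// pushed already, the frame size minus argSize bytes is recorded for GC
// marking, and a frame descriptor plus the return address in lr are pushed
// before branching. Because this is a branch rather than a call, the VM
// wrapper returns directly to the IC's original caller.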
inline void
EmitTailCallVM(JitCode *target, MacroAssembler &masm, uint32_t argSize)
{
    // We assume during this that R0 and R1 have been pushed, and that R2 is
    // unused.
    JS_ASSERT(R2 == ValueOperand(r1, r0));

    // Compute frame size.
    masm.movePtr(BaselineFrameReg, r0);
    masm.ma_add(Imm32(BaselineFrame::FramePointerOffset), r0);
    masm.ma_sub(BaselineStackReg, r0);

    // Store frame size without VMFunction arguments for GC marking.
    masm.ma_sub(r0, Imm32(argSize), r1);
    masm.store32(r1, Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));

    // Push frame descriptor and perform the tail call.
    // BaselineTailCallReg (lr) already contains the return address (as we keep it there through
    // the stub calls), but the VMWrapper code being called expects the return address to also
    // be pushed on the stack.
    JS_ASSERT(BaselineTailCallReg == lr);
    masm.makeFrameDescriptor(r0, JitFrame_BaselineJS);
    masm.push(r0);
    masm.push(lr);
    masm.branch(target);
}

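// A note on frame descriptors (inferred from the code in this file, not stated
// explicitly): EmitLeaveStubFrameHead recovers a frame's size from its
// descriptor by shifting right by FRAMESIZE_SHIFT, so makeFrameDescriptor is
// expected to pack the size in the high bits alongside the frame type tag.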
inline void
EmitCreateStubFrameDescriptor(MacroAssembler &masm, Register reg)
{
    // Compute stub frame size. We have to add two pointers: the stub reg and previous
    // frame pointer pushed by EmitEnterStubFrame.
    masm.mov(BaselineFrameReg, reg);
    masm.ma_add(Imm32(sizeof(void *) * 2), reg);
    masm.ma_sub(BaselineStackReg, reg);

    masm.makeFrameDescriptor(reg, JitFrame_BaselineStub);
}

inline void
EmitCallVM(JitCode *target, MacroAssembler &masm)
{
    EmitCreateStubFrameDescriptor(masm, r0);
    masm.push(r0);
    masm.call(target);
}

// Size of values pushed by EmitEnterStubFrame.
static const uint32_t STUB_FRAME_SIZE = 4 * sizeof(void *);
static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = sizeof(void *);

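// Sketch of the stub frame laid out by EmitEnterStubFrame (derived from the
// pushes below; higher addresses first):
//
//     ... values pushed before entering ...   <- sp + STUB_FRAME_SIZE
//     frame descriptor
//     return address (lr)
//     saved BaselineStubReg                   <- sp + STUB_FRAME_SAVED_STUB_OFFSET
//     saved BaselineFrameReg                  <- sp == new BaselineFrameReg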
inline void
EmitEnterStubFrame(MacroAssembler &masm, Register scratch)
{
    JS_ASSERT(scratch != BaselineTailCallReg);

    // Compute frame size.
    masm.mov(BaselineFrameReg, scratch);
    masm.ma_add(Imm32(BaselineFrame::FramePointerOffset), scratch);
    masm.ma_sub(BaselineStackReg, scratch);

    masm.store32(scratch, Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));

    // Note: when making changes here, don't forget to update STUB_FRAME_SIZE
    // if needed.

    // Push frame descriptor and return address.
    masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);
    masm.push(scratch);
    masm.push(BaselineTailCallReg);

    // Save old frame pointer, stack pointer and stub reg.
    masm.push(BaselineStubReg);
    masm.push(BaselineFrameReg);
    masm.mov(BaselineStackReg, BaselineFrameReg);

    // We pushed 4 words, so the stack is still aligned to 8 bytes.
    masm.checkStackAlignment();
}

inline void
EmitLeaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon = false)
{
    // Ion frames do not save and restore the frame pointer. If we called
    // into Ion, we have to restore the stack pointer from the frame descriptor.
    // If we performed a VM call, the descriptor has been popped already so
    // in that case we use the frame pointer.
    if (calledIntoIon) {
        masm.pop(ScratchRegister);
        masm.ma_lsr(Imm32(FRAMESIZE_SHIFT), ScratchRegister, ScratchRegister);
        masm.ma_add(ScratchRegister, BaselineStackReg);
    } else {
        masm.mov(BaselineFrameReg, BaselineStackReg);
    }
}

inline void
EmitLeaveStubFrameCommonTail(MacroAssembler &masm)
{
    masm.pop(BaselineFrameReg);
    masm.pop(BaselineStubReg);

    // Load the return address.
    masm.pop(BaselineTailCallReg);

    // Discard the frame descriptor.
    masm.pop(ScratchRegister);
}

inline void
EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
{
    EmitLeaveStubFrameHead(masm, calledIntoIon);
    EmitLeaveStubFrameCommonTail(masm);
}

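// Both helpers below accept 0, 1 or 2 values; with values == 0 the switch has
// no matching case and no code is emitted.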
inline void
EmitStowICValues(MacroAssembler &masm, int values)
{
    JS_ASSERT(values >= 0 && values <= 2);
    switch(values) {
      case 1:
        // Stow R0
        masm.pushValue(R0);
        break;
      case 2:
        // Stow R0 and R1
        masm.pushValue(R0);
        masm.pushValue(R1);
        break;
    }
}

inline void
EmitUnstowICValues(MacroAssembler &masm, int values, bool discard = false)
{
    JS_ASSERT(values >= 0 && values <= 2);
    switch(values) {
      case 1:
        // Unstow R0
        if (discard)
            masm.addPtr(Imm32(sizeof(Value)), BaselineStackReg);
        else
            masm.popValue(R0);
        break;
      case 2:
        // Unstow R0 and R1
        if (discard) {
            masm.addPtr(Imm32(sizeof(Value) * 2), BaselineStackReg);
        } else {
            masm.popValue(R1);
            masm.popValue(R0);
        }
        break;
    }
}

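// EmitCallTypeUpdateIC calls the chain of type-update stubs for the value in
// R0. The stub reports success by leaving 1 in R1.scratchReg(); on failure the
// code below enters a stub frame and calls the fallback VM function `code`,
// pushing the value, the object (reloaded from the stack at objectOffset), the
// update stub, and the BaselineFrame pointer.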
inline void
EmitCallTypeUpdateIC(MacroAssembler &masm, JitCode *code, uint32_t objectOffset)
{
    JS_ASSERT(R2 == ValueOperand(r1, r0));

    // R0 contains the value that needs to be typechecked.
    // The object we're updating is a boxed Value on the stack, at offset
    // objectOffset from esp, excluding the return address.

    // Save the current BaselineStubReg to stack, as well as the TailCallReg,
    // since on ARM, the LR is live.
    masm.push(BaselineStubReg);
    masm.push(BaselineTailCallReg);

    // This is expected to be called from within an IC, when BaselineStubReg
    // is properly initialized to point to the stub.
    masm.loadPtr(Address(BaselineStubReg, ICUpdatedStub::offsetOfFirstUpdateStub()),
                 BaselineStubReg);

    // TODO: Change r0 uses below to use masm's configurable scratch register instead.

    // Load stubcode pointer from BaselineStubReg into BaselineTailCallReg.
    masm.loadPtr(Address(BaselineStubReg, ICStub::offsetOfStubCode()), r0);

    // Call the stubcode.
    masm.ma_blx(r0);

    // Restore the old stub reg and tailcall reg.
    masm.pop(BaselineTailCallReg);
    masm.pop(BaselineStubReg);

    // The update IC will store 0 or 1 in R1.scratchReg() reflecting if the
    // value in R0 type-checked properly or not.
    Label success;
    masm.cmp32(R1.scratchReg(), Imm32(1));
    masm.j(Assembler::Equal, &success);

    // If the IC failed, then call the update fallback function.
    EmitEnterStubFrame(masm, R1.scratchReg());

    masm.loadValue(Address(BaselineStackReg, STUB_FRAME_SIZE + objectOffset), R1);

    masm.pushValue(R0);
    masm.pushValue(R1);
    masm.push(BaselineStubReg);

    // Load previous frame pointer, push BaselineFrame *.
    masm.loadPtr(Address(BaselineFrameReg, 0), R0.scratchReg());
    masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());

    EmitCallVM(code, masm);
    EmitLeaveStubFrame(masm);

    // Success at end.
    masm.bind(&success);
}

template <typename AddrType>
inline void
EmitPreBarrier(MacroAssembler &masm, const AddrType &addr, MIRType type)
{
    // on ARM, lr is clobbered by patchableCallPreBarrier. Save it first.
    masm.push(lr);
    masm.patchableCallPreBarrier(addr, type);
    masm.pop(lr);
}

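// EmitStubGuardFailure is emitted on a stub's guard-failure path: it loads the
// next stub in the chain via ICStub::offsetOfNext() and jumps straight to that
// stub's code, relying on the stack and lr being exactly as they were when the
// failing stub was entered.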
inline void
EmitStubGuardFailure(MacroAssembler &masm)
{
    JS_ASSERT(R2 == ValueOperand(r1, r0));

    // NOTE: This routine assumes that the stub guard code left the stack in the
    // same state it was in when it was entered.

    // BaselineStubEntry points to the current stub.

    // Load next stub into BaselineStubReg
    masm.loadPtr(Address(BaselineStubReg, ICStub::offsetOfNext()), BaselineStubReg);

    // Load stubcode pointer from BaselineStubEntry into scratch register.
    masm.loadPtr(Address(BaselineStubReg, ICStub::offsetOfStubCode()), r0);

    // Return address is already loaded, just jump to the next stubcode.
    JS_ASSERT(BaselineTailCallReg == lr);
    masm.branch(r0);
}


} // namespace jit
} // namespace js

#endif // JS_ION

#endif /* jit_arm_BaselineHelpers_arm_h */
