js/src/jit/x64/Assembler-x64.cpp

changeset 0:6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/jit/x64/Assembler-x64.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,265 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "jit/x64/Assembler-x64.h"
    1.11 +
    1.12 +#include "gc/Marking.h"
    1.13 +
    1.14 +using namespace js;
    1.15 +using namespace js::jit;
    1.16 +
    1.17 +ABIArgGenerator::ABIArgGenerator()
    1.18 +  :
    1.19 +#if defined(XP_WIN)
    1.20 +    regIndex_(0),
    1.21 +    stackOffset_(ShadowStackSpace),
    1.22 +#else
    1.23 +    intRegIndex_(0),
    1.24 +    floatRegIndex_(0),
    1.25 +    stackOffset_(0),
    1.26 +#endif
    1.27 +    current_()
    1.28 +{}
    1.29 +
    1.30 +ABIArg
    1.31 +ABIArgGenerator::next(MIRType type)
    1.32 +{
    1.33 +#if defined(XP_WIN)
    1.34 +    JS_STATIC_ASSERT(NumIntArgRegs == NumFloatArgRegs);
    1.35 +    if (regIndex_ == NumIntArgRegs) {
    1.36 +        current_ = ABIArg(stackOffset_);
    1.37 +        stackOffset_ += sizeof(uint64_t);
    1.38 +        return current_;
    1.39 +    }
    1.40 +    switch (type) {
    1.41 +      case MIRType_Int32:
    1.42 +      case MIRType_Pointer:
    1.43 +        current_ = ABIArg(IntArgRegs[regIndex_++]);
    1.44 +        break;
    1.45 +      case MIRType_Float32:
    1.46 +      case MIRType_Double:
    1.47 +        current_ = ABIArg(FloatArgRegs[regIndex_++]);
    1.48 +        break;
    1.49 +      default:
    1.50 +        MOZ_ASSUME_UNREACHABLE("Unexpected argument type");
    1.51 +    }
    1.52 +    return current_;
    1.53 +#else
    1.54 +    switch (type) {
    1.55 +      case MIRType_Int32:
    1.56 +      case MIRType_Pointer:
    1.57 +        if (intRegIndex_ == NumIntArgRegs) {
    1.58 +            current_ = ABIArg(stackOffset_);
    1.59 +            stackOffset_ += sizeof(uint64_t);
    1.60 +            break;
    1.61 +        }
    1.62 +        current_ = ABIArg(IntArgRegs[intRegIndex_++]);
    1.63 +        break;
    1.64 +      case MIRType_Double:
    1.65 +      case MIRType_Float32:
    1.66 +        if (floatRegIndex_ == NumFloatArgRegs) {
    1.67 +            current_ = ABIArg(stackOffset_);
    1.68 +            stackOffset_ += sizeof(uint64_t);
    1.69 +            break;
    1.70 +        }
    1.71 +        current_ = ABIArg(FloatArgRegs[floatRegIndex_++]);
    1.72 +        break;
    1.73 +      default:
    1.74 +        MOZ_ASSUME_UNREACHABLE("Unexpected argument type");
    1.75 +    }
    1.76 +    return current_;
    1.77 +#endif
    1.78 +}
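The two branches above implement the two x64 calling conventions: Win64 burns one argument position per parameter regardless of type (rcx/rdx/r8/r9 alias xmm0-xmm3 positionally), while the System V ABI keeps independent counters for integer and float registers. A minimal standalone sketch of that rule, using hypothetical names rather than the jit types:

    #include <cstddef>
    #include <vector>

    enum class ArgKind { Int, Float };
    struct Slot { bool onStack; size_t indexOrOffset; };   // register index or stack offset

    // Win64: four shared argument positions, then the stack, after the 32 bytes
    // of shadow space the caller reserves.
    static std::vector<Slot> classifyWin64(const std::vector<ArgKind> &args) {
        std::vector<Slot> out;
        size_t regIndex = 0, stackOffset = 32;
        for (ArgKind kind : args) {
            (void)kind;                           // type only selects GPR vs XMM, not the index
            if (regIndex < 4) {
                out.push_back({false, regIndex++});
            } else {
                out.push_back({true, stackOffset});
                stackOffset += 8;
            }
        }
        return out;
    }

    // System V: up to 6 integer registers and 8 float registers, counted separately.
    static std::vector<Slot> classifySysV(const std::vector<ArgKind> &args) {
        std::vector<Slot> out;
        size_t intIndex = 0, floatIndex = 0, stackOffset = 0;
        for (ArgKind kind : args) {
            size_t &index = (kind == ArgKind::Int) ? intIndex : floatIndex;
            size_t limit  = (kind == ArgKind::Int) ? 6 : 8;
            if (index < limit) {
                out.push_back({false, index++});
            } else {
                out.push_back({true, stackOffset});
                stackOffset += 8;
            }
        }
        return out;
    }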
    1.79 +
    1.80 +// Avoid r11, which is the MacroAssembler's ScratchReg.
    1.81 +const Register ABIArgGenerator::NonArgReturnVolatileReg0 = r10;
    1.82 +const Register ABIArgGenerator::NonArgReturnVolatileReg1 = r12;
    1.83 +const Register ABIArgGenerator::NonVolatileReg = r13;
    1.84 +
    1.85 +void
    1.86 +Assembler::writeRelocation(JmpSrc src, Relocation::Kind reloc)
    1.87 +{
    1.88 +    if (!jumpRelocations_.length()) {
    1.89 +        // The jump relocation table starts with a fixed-width integer pointing
     1.90 +        // to the start of the extended jump table. But we don't know the
    1.91 +        // actual extended jump table offset yet, so write a 0 which we'll
    1.92 +        // patch later.
    1.93 +        jumpRelocations_.writeFixedUint32_t(0);
    1.94 +    }
    1.95 +    if (reloc == Relocation::JITCODE) {
    1.96 +        jumpRelocations_.writeUnsigned(src.offset());
    1.97 +        jumpRelocations_.writeUnsigned(jumps_.length());
    1.98 +    }
    1.99 +}
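Concretely, the buffer this builds is a placeholder header followed by one (code offset, jump index) pair per JITCODE jump. If, say, the first two pending jumps were both JITCODE jumps recorded at code offsets 0x10 and 0x40 (hypothetical numbers), the table would read:

    [uint32  0   ]   offset of the extended jump table, backpatched in finish()
    [compact 0x10]   code offset of jump #0
    [compact 0   ]   its index into jumps_ (and into the extended jump table)
    [compact 0x40]   code offset of jump #1
    [compact 1   ]   its index into jumps_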
   1.100 +
   1.101 +void
   1.102 +Assembler::addPendingJump(JmpSrc src, ImmPtr target, Relocation::Kind reloc)
   1.103 +{
   1.104 +    JS_ASSERT(target.value != nullptr);
   1.105 +
   1.106 +    // Emit reloc before modifying the jump table, since it computes a 0-based
   1.107 +    // index. This jump is not patchable at runtime.
   1.108 +    if (reloc == Relocation::JITCODE)
   1.109 +        writeRelocation(src, reloc);
   1.110 +    enoughMemory_ &= jumps_.append(RelativePatch(src.offset(), target.value, reloc));
   1.111 +}
   1.112 +
   1.113 +size_t
   1.114 +Assembler::addPatchableJump(JmpSrc src, Relocation::Kind reloc)
   1.115 +{
   1.116 +    // This jump is patchable at runtime so we always need to make sure the
   1.117 +    // jump table is emitted.
   1.118 +    writeRelocation(src, reloc);
   1.119 +
   1.120 +    size_t index = jumps_.length();
   1.121 +    enoughMemory_ &= jumps_.append(RelativePatch(src.offset(), nullptr, reloc));
   1.122 +    return index;
   1.123 +}
   1.124 +
   1.125 +/* static */
   1.126 +uint8_t *
   1.127 +Assembler::PatchableJumpAddress(JitCode *code, size_t index)
   1.128 +{
    1.129 +    // The assembler stashed the code offset of the fragments used for far
    1.130 +    // jumps (the extended jump table) at the start of the relocation table.
    1.131 +    uint32_t jumpOffset = *(uint32_t *)code->jumpRelocTable();
   1.132 +    jumpOffset += index * SizeOfJumpTableEntry;
   1.133 +
   1.134 +    JS_ASSERT(jumpOffset + SizeOfExtendedJump <= code->instructionsSize());
   1.135 +    return code->raw() + jumpOffset;
   1.136 +}
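Given the asserts in finish() below, each jump table entry is 16 bytes (a 6-byte rip-relative jmp, a 2-byte ud2, and an 8-byte target). So if the stashed table offset were 0x200 (a made-up value), entry index 3 would resolve to code->raw() + 0x200 + 3 * 16 = code->raw() + 0x230.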
   1.137 +
   1.138 +/* static */
   1.139 +void
   1.140 +Assembler::PatchJumpEntry(uint8_t *entry, uint8_t *target)
   1.141 +{
   1.142 +    uint8_t **index = (uint8_t **) (entry + SizeOfExtendedJump - sizeof(void*));
   1.143 +    *index = target;
   1.144 +}
   1.145 +
   1.146 +void
   1.147 +Assembler::finish()
   1.148 +{
   1.149 +    if (!jumps_.length() || oom())
   1.150 +        return;
   1.151 +
   1.152 +    // Emit the jump table.
   1.153 +    masm.align(SizeOfJumpTableEntry);
   1.154 +    extendedJumpTable_ = masm.size();
   1.155 +
   1.156 +    // Now that we know the offset to the jump table, squirrel it into the
   1.157 +    // jump relocation buffer if any JitCode references exist and must be
   1.158 +    // tracked for GC.
   1.159 +    JS_ASSERT_IF(jumpRelocations_.length(), jumpRelocations_.length() >= sizeof(uint32_t));
   1.160 +    if (jumpRelocations_.length())
   1.161 +        *(uint32_t *)jumpRelocations_.buffer() = extendedJumpTable_;
   1.162 +
    1.163 +    // Emit the extended jump table entries, with their targets zeroed for now.
   1.164 +    for (size_t i = 0; i < jumps_.length(); i++) {
   1.165 +#ifdef DEBUG
   1.166 +        size_t oldSize = masm.size();
   1.167 +#endif
   1.168 +        masm.jmp_rip(2);
   1.169 +        JS_ASSERT(masm.size() - oldSize == 6);
   1.170 +        // Following an indirect branch with ud2 hints to the hardware that
   1.171 +        // there's no fall-through. This also aligns the 64-bit immediate.
   1.172 +        masm.ud2();
   1.173 +        JS_ASSERT(masm.size() - oldSize == 8);
   1.174 +        masm.immediate64(0);
   1.175 +        JS_ASSERT(masm.size() - oldSize == SizeOfExtendedJump);
   1.176 +        JS_ASSERT(masm.size() - oldSize == SizeOfJumpTableEntry);
   1.177 +    }
   1.178 +}
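Each entry emitted by the loop above should decode roughly as follows (a sketch; the 8-byte target stays zero until executableCopy() or PatchJumpEntry() fills it in):

    ff 25 02 00 00 00          jmp [rip+2]   ; jump through the 64-bit slot at entry+8
    0f 0b                      ud2           ; no fall-through; pads the code to 8 bytes
    00 00 00 00 00 00 00 00    .quad 0       ; absolute 64-bit target, patched later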
   1.179 +
   1.180 +void
   1.181 +Assembler::executableCopy(uint8_t *buffer)
   1.182 +{
   1.183 +    AssemblerX86Shared::executableCopy(buffer);
   1.184 +
   1.185 +    for (size_t i = 0; i < jumps_.length(); i++) {
   1.186 +        RelativePatch &rp = jumps_[i];
   1.187 +        uint8_t *src = buffer + rp.offset;
   1.188 +        if (!rp.target) {
   1.189 +            // The patch target is nullptr for jumps that have been linked to
   1.190 +            // a label within the same code block, but may be repatched later
   1.191 +            // to jump to a different code block.
   1.192 +            continue;
   1.193 +        }
   1.194 +        if (JSC::X86Assembler::canRelinkJump(src, rp.target)) {
   1.195 +            JSC::X86Assembler::setRel32(src, rp.target);
   1.196 +        } else {
   1.197 +            // An extended jump table must exist, and its offset must be in
   1.198 +            // range.
   1.199 +            JS_ASSERT(extendedJumpTable_);
   1.200 +            JS_ASSERT((extendedJumpTable_ + i * SizeOfJumpTableEntry) <= size() - SizeOfJumpTableEntry);
   1.201 +
   1.202 +            // Patch the jump to go to the extended jump entry.
   1.203 +            uint8_t *entry = buffer + extendedJumpTable_ + i * SizeOfJumpTableEntry;
   1.204 +            JSC::X86Assembler::setRel32(src, entry);
   1.205 +
    1.206 +            // Now patch the pointer. Note that the address we pass points
    1.207 +            // *after* the extended jump, i.e. just past the 64-bit immediate.
   1.208 +            JSC::X86Assembler::repatchPointer(entry + SizeOfExtendedJump, rp.target);
   1.209 +        }
   1.210 +    }
   1.211 +}
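The split above exists because an x64 jmp rel32 can only reach targets within a signed 32-bit displacement of the jump; anything farther must go through a table entry holding a full 64-bit target. A standalone sketch of the reachability test (a hypothetical helper, not the actual canRelinkJump implementation):

    #include <cstdint>
    #include <limits>

    // True if `target` is within a signed 32-bit displacement of `from`, where
    // `from` is the address the displacement is relative to (for jmp rel32,
    // the end of the jump instruction).
    static bool fitsInRel32(const uint8_t *from, const uint8_t *target) {
        intptr_t delta = reinterpret_cast<intptr_t>(target) -
                         reinterpret_cast<intptr_t>(from);
        return delta >= std::numeric_limits<int32_t>::min() &&
               delta <= std::numeric_limits<int32_t>::max();
    }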
   1.212 +
   1.213 +class RelocationIterator
   1.214 +{
   1.215 +    CompactBufferReader reader_;
   1.216 +    uint32_t tableStart_;
   1.217 +    uint32_t offset_;
   1.218 +    uint32_t extOffset_;
   1.219 +
   1.220 +  public:
   1.221 +    RelocationIterator(CompactBufferReader &reader)
   1.222 +      : reader_(reader)
   1.223 +    {
   1.224 +        tableStart_ = reader_.readFixedUint32_t();
   1.225 +    }
   1.226 +
   1.227 +    bool read() {
   1.228 +        if (!reader_.more())
   1.229 +            return false;
   1.230 +        offset_ = reader_.readUnsigned();
   1.231 +        extOffset_ = reader_.readUnsigned();
   1.232 +        return true;
   1.233 +    }
   1.234 +
   1.235 +    uint32_t offset() const {
   1.236 +        return offset_;
   1.237 +    }
   1.238 +    uint32_t extendedOffset() const {
   1.239 +        return extOffset_;
   1.240 +    }
   1.241 +};
   1.242 +
   1.243 +JitCode *
   1.244 +Assembler::CodeFromJump(JitCode *code, uint8_t *jump)
   1.245 +{
   1.246 +    uint8_t *target = (uint8_t *)JSC::X86Assembler::getRel32Target(jump);
   1.247 +    if (target >= code->raw() && target < code->raw() + code->instructionsSize()) {
   1.248 +        // This jump is within the code buffer, so it has been redirected to
   1.249 +        // the extended jump table.
   1.250 +        JS_ASSERT(target + SizeOfJumpTableEntry <= code->raw() + code->instructionsSize());
   1.251 +
   1.252 +        target = (uint8_t *)JSC::X86Assembler::getPointer(target + SizeOfExtendedJump);
   1.253 +    }
   1.254 +
   1.255 +    return JitCode::FromExecutable(target);
   1.256 +}
   1.257 +
   1.258 +void
   1.259 +Assembler::TraceJumpRelocations(JSTracer *trc, JitCode *code, CompactBufferReader &reader)
   1.260 +{
   1.261 +    RelocationIterator iter(reader);
   1.262 +    while (iter.read()) {
   1.263 +        JitCode *child = CodeFromJump(code, code->raw() + iter.offset());
   1.264 +        MarkJitCodeUnbarriered(trc, &child, "rel32");
   1.265 +        JS_ASSERT(child == CodeFromJump(code, code->raw() + iter.offset()));
   1.266 +    }
   1.267 +}
   1.268 +
