Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/UnreachableCodeElimination.h"

#include "jit/AliasAnalysis.h"
#include "jit/IonAnalysis.h"
#include "jit/MIRGenerator.h"
#include "jit/ValueNumbering.h"

using namespace js;
using namespace jit;

bool
UnreachableCodeElimination::analyze()
{
    // The goal of this routine is to eliminate code that is
    // unreachable, either because there is no path from the entry
    // block to the code, or because the path traverses a conditional
    // branch where the condition is a constant (e.g., "if (false) {
    // ... }"). The latter can either appear in the source form or
    // arise due to optimizations.
    //
    // The strategy is straightforward. The pass begins with a
    // depth-first search. We set a bit on each basic block that
    // is visited. If a block terminates in a conditional branch
    // predicated on a constant, we rewrite the block to an unconditional
    // jump and do not visit the now-irrelevant basic block.
    //
    // Once the initial DFS is complete, we do a second pass over the
    // blocks to find those that were not reached. Those blocks are
    // simply removed wholesale. We must also correct any phis that
    // may be affected.

    // Pass 1: Identify unreachable blocks (if any).
    if (!prunePointlessBranchesAndMarkReachableBlocks())
        return false;

    return removeUnmarkedBlocksAndCleanup();
}

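// Alternate entry point for callers that have already marked the set of
// reachable blocks themselves; |marked| is the number of blocks carrying the
// mark. Only the removal/cleanup half of the pass runs in this case.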
bool
UnreachableCodeElimination::removeUnmarkedBlocks(size_t marked)
{
    marked_ = marked;
    return removeUnmarkedBlocksAndCleanup();
}

bool
UnreachableCodeElimination::removeUnmarkedBlocksAndCleanup()
{
    // Everything is reachable, no work required.
    JS_ASSERT(marked_ <= graph_.numBlocks());
    if (marked_ == graph_.numBlocks()) {
        graph_.unmarkBlocks();
        return true;
    }

    // Pass 2: Remove unmarked blocks (see analyze() above).
    if (!removeUnmarkedBlocksAndClearDominators())
        return false;
    graph_.unmarkBlocks();

    AssertGraphCoherency(graph_);

    IonSpewPass("UCE-mid-point");

    // Pass 3: Recompute dominators and tweak phis.
    BuildDominatorTree(graph_);
    if (redundantPhis_ && !EliminatePhis(mir_, graph_, ConservativeObservability))
        return false;

    // Pass 4: Rerun alias analysis.
    if (rerunAliasAnalysis_) {
        AliasAnalysis analysis(mir_, graph_);
        if (!analysis.analyze())
            return false;
    }

    // Pass 5: It's important for optimizations to re-run GVN (and in
    // turn alias analysis) after UCE if we eliminated branches.
    if (rerunAliasAnalysis_ && mir_->optimizationInfo().gvnEnabled()) {
        ValueNumberer gvn(mir_, graph_, mir_->optimizationInfo().gvnKind() == GVN_Optimistic);
        if (!gvn.clear() || !gvn.analyze())
            return false;
        IonSpewPass("GVN-after-UCE");
        AssertExtendedGraphCoherency(graph_);

        if (mir_->shouldCancel("GVN-after-UCE"))
            return false;
    }

    return true;
}

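// Marks |block| and appends it to |list| so the DFS will visit it; blocks
// that are already marked are skipped. Returns false only if appending to
// the worklist fails (OOM).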
bool
UnreachableCodeElimination::enqueue(MBasicBlock *block, BlockList &list)
{
    if (block->isMarked())
        return true;

    block->mark();
    marked_++;
    return list.append(block);
}

MBasicBlock *
UnreachableCodeElimination::optimizableSuccessor(MBasicBlock *block)
{
    // If the last instruction in `block` is a test instruction of a
    // constant value, returns the successor that the branch will
    // always branch to at runtime. Otherwise, returns nullptr.

    MControlInstruction *ins = block->lastIns();
    if (!ins->isTest())
        return nullptr;

    MTest *testIns = ins->toTest();
    MDefinition *v = testIns->getOperand(0);
    if (!v->isConstant())
        return nullptr;

    BranchDirection bdir = v->toConstant()->valueToBoolean() ? TRUE_BRANCH : FALSE_BRANCH;
    return testIns->branchSuccessor(bdir);
}

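// Pass 1 of analyze() above: a DFS from the entry block (and, if present,
// the OSR block) that marks every reachable block. Blocks ending in a test
// on a constant are recorded and later rewritten into unconditional gotos.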
bool
UnreachableCodeElimination::prunePointlessBranchesAndMarkReachableBlocks()
{
    BlockList worklist, optimizableBlocks;

    // Process everything reachable from the start block, ignoring any
    // OSR block.
    if (!enqueue(graph_.entryBlock(), worklist))
        return false;
    while (!worklist.empty()) {
        if (mir_->shouldCancel("Eliminate Unreachable Code"))
            return false;

        MBasicBlock *block = worklist.popCopy();

        // If this block is a test on a constant operand, only enqueue
        // the relevant successor. Also, remember the block for later.
        if (MBasicBlock *succ = optimizableSuccessor(block)) {
            if (!optimizableBlocks.append(block))
                return false;
            if (!enqueue(succ, worklist))
                return false;
        } else {
            // Otherwise just visit all successors.
            for (size_t i = 0; i < block->numSuccessors(); i++) {
                MBasicBlock *succ = block->getSuccessor(i);
                if (!enqueue(succ, worklist))
                    return false;
            }
        }
    }

    // Now, if there is an OSR block, check that all of its successors
    // were reachable (bug 880377). If not, we are in danger of
    // creating a CFG with two disjoint parts, so simply mark all
    // blocks as reachable. This generally occurs when the TI info for
    // stack types is incorrect or incomplete, due to operations that
    // have not yet executed in baseline.
    if (graph_.osrBlock()) {
        MBasicBlock *osrBlock = graph_.osrBlock();
        JS_ASSERT(!osrBlock->isMarked());
        if (!enqueue(osrBlock, worklist))
            return false;
        for (size_t i = 0; i < osrBlock->numSuccessors(); i++) {
            if (!osrBlock->getSuccessor(i)->isMarked()) {
                // OSR block has an otherwise unreachable successor, abort.
                for (MBasicBlockIterator iter(graph_.begin()); iter != graph_.end(); iter++)
                    iter->mark();
                marked_ = graph_.numBlocks();
                return true;
            }
        }
    }

    // Now that we know we will not abort due to OSR, go back and
    // transform any tests on constant operands into gotos.
    for (uint32_t i = 0; i < optimizableBlocks.length(); i++) {
        MBasicBlock *block = optimizableBlocks[i];
        MBasicBlock *succ = optimizableSuccessor(block);
        JS_ASSERT(succ);

        MGoto *gotoIns = MGoto::New(graph_.alloc(), succ);
        block->discardLastIns();
        block->end(gotoIns);
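        // The discarded test had two successors. If the successor that
        // carried this block's phi operands is not the one we kept, this
        // block no longer feeds phis in any successor, so clear the flag.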
        MBasicBlock *successorWithPhis = block->successorWithPhis();
        if (successorWithPhis && successorWithPhis != succ)
            block->setSuccessorWithPhis(nullptr, 0);
    }

    return true;
}

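// Called for each phi and instruction in a surviving (marked) block: flags
// that alias analysis must be rerun when the instruction's dependency lives
// in a block about to be removed, and drops any of its uses that sit in
// such blocks.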
void
UnreachableCodeElimination::checkDependencyAndRemoveUsesFromUnmarkedBlocks(MDefinition *instr)
{
    // When the instruction depends on a removed block, alias analysis
    // needs to be rerun to compute the right dependency.
    if (!disableAliasAnalysis_ && instr->dependency() && !instr->dependency()->block()->isMarked())
        rerunAliasAnalysis_ = true;

    for (MUseIterator iter(instr->usesBegin()); iter != instr->usesEnd(); ) {
        if (!iter->consumer()->block()->isMarked()) {
            instr->setUseRemovedUnchecked();
            iter = instr->removeUse(iter);
        } else {
            iter++;
        }
    }
}

bool
UnreachableCodeElimination::removeUnmarkedBlocksAndClearDominators()
{
    // Removes blocks that are not marked from the graph. For blocks
    // that *are* marked, clears the mark and adjusts the id to its
    // new value. Also adds blocks that are immediately reachable
    // from an unmarked block to the frontier.

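    // Ids are handed out in descending order while walking the blocks in
    // postorder, so the surviving blocks end up numbered 0..marked_-1 in
    // reverse postorder (the entry block, visited last, receives id 0).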
    size_t id = marked_;
    for (PostorderIterator iter(graph_.poBegin()); iter != graph_.poEnd();) {
        if (mir_->shouldCancel("Eliminate Unreachable Code"))
            return false;

        MBasicBlock *block = *iter;
        iter++;

        // Unconditionally clear the dominators. It's somewhat complex to
        // adjust the values and relatively fast to just recompute.
        block->clearDominatorInfo();

        if (block->isMarked()) {
            block->setId(--id);
            for (MPhiIterator iter(block->phisBegin()); iter != block->phisEnd(); iter++)
                checkDependencyAndRemoveUsesFromUnmarkedBlocks(*iter);
            for (MInstructionIterator iter(block->begin()); iter != block->end(); iter++)
                checkDependencyAndRemoveUsesFromUnmarkedBlocks(*iter);
        } else {
            if (block->numPredecessors() > 1) {
                // If this block had phis, then any reachable
                // predecessors need to have the successorWithPhis
                // flag cleared.
                for (size_t i = 0; i < block->numPredecessors(); i++)
                    block->getPredecessor(i)->setSuccessorWithPhis(nullptr, 0);
            }

            if (block->isLoopBackedge()) {
                // NB. We have to update the loop header if we
                // eliminate the backedge. At first I thought this
                // check would be insufficient, because it would be
                // possible to have code like this:
                //
                //   while (true) {
                //     ...;
                //     if (1 == 1) break;
                //   }
                //
                // in which the backedge is removed as part of
                // rewriting the condition, but no actual blocks are
                // removed. However, in all such cases, the backedge
                // would be a critical edge and hence the critical
                // edge block is being removed.
                block->loopHeaderOfBackedge()->clearLoopHeader();
            }

            for (size_t i = 0, c = block->numSuccessors(); i < c; i++) {
                MBasicBlock *succ = block->getSuccessor(i);
                if (succ->isMarked()) {
                    // succ is on the frontier of blocks to be removed:
                    succ->removePredecessor(block);

                    if (!redundantPhis_) {
                        for (MPhiIterator iter(succ->phisBegin()); iter != succ->phisEnd(); iter++) {
                            if (iter->operandIfRedundant()) {
                                redundantPhis_ = true;
                                break;
                            }
                        }
                    }
                }
            }

            graph_.removeBlock(block);
        }
    }

    JS_ASSERT(id == 0);

    return true;
}