// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {


// Maps a JavaScript comparison token onto the ARM condition code the
// CompareIC uses for that comparison. Strict and non-strict equality
// both map to eq; the relational tokens map to their signed ARM
// counterparts (lt/gt/le/ge).
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      // Not reached; keeps all control paths returning a value.
      return kNoCondition;
  }
}


// Returns true if the CompareIC call at |address| has inlined smi-check
// code. The marker is the instruction immediately after the call: an
// inlined site leaves a "cmp rx, #yyy" there (see PatchInlinedSmiCode
// below for how that instruction encodes the patch delta).
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  return Assembler::IsCmpImmediate(instr);
}


// Enables or disables the inlined smi check at the CompareIC call site
// |address|, as selected by |check|. The location of the patchable code
// is found via a "cmp rx, #yyy" marker instruction placed right after
// the call: the delta (in instructions) back to the patch site is
// recovered as the cmp's raw 12-bit immediate plus the compared
// register's code scaled by kOff12Mask.
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump. Reassembled from the
  // immediate field and the register code of the marker cmp.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta += Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

  if (FLAG_trace_ic) {
    LOG(isolate, PatchIC(address, cmp_instruction_address, delta));
  }

  // Walk back |delta| instructions from the marker to the patch site.
  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  // This is patching a conditional "jump if not smi/jump if smi" site.
  // Enabling by changing from
  //   cmp rx, rx
  //   b eq/ne, <target>
  // to
  //   tst rx, #kSmiTagMask
  //   b ne/eq, <target>
  // and vice-versa to be disabled again.
  // The patch window is two instructions: the cmp/tst itself and the
  // condition field of the branch that follows it.
  CodePatcher patcher(isolate, patch_address, 2);
  Register reg = Assembler::GetRn(instr_at_patch);
  if (check == ENABLE_INLINED_SMI_CHECK) {
    // The disabled form must be "cmp rx, rx" (same register in Rn and Rm).
    DCHECK(Assembler::IsCmpRegister(instr_at_patch));
    DCHECK_EQ(Assembler::GetRn(instr_at_patch).code(),
              Assembler::GetRm(instr_at_patch).code());
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
  } else {
    DCHECK(check == DISABLE_INLINED_SMI_CHECK);
    // The enabled form must be "tst rx, #kSmiTagMask".
    DCHECK(Assembler::IsTstImmediate(instr_at_patch));
    patcher.masm()->cmp(reg, reg);
  }
  // Swapping cmp <-> tst inverts the meaning of the flags, so the branch
  // condition must be flipped between eq and ne to preserve the jump's
  // smi/not-smi sense.
  DCHECK(Assembler::IsBranch(branch_instr));
  if (Assembler::GetCondition(branch_instr) == eq) {
    patcher.EmitCondition(ne);
  } else {
    DCHECK(Assembler::GetCondition(branch_instr) == ne);
    patcher.EmitCondition(eq);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM