// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {


// Maps a JavaScript comparison token to the PPC condition code used by the
// compare IC. Strict and non-strict equality share the same condition.
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


// Returns true if the full codegen inlined a smi check at this call site.
// The marker is a "cmp rx, #yyy" immediate-compare instruction placed
// immediately after the IC call (see JumpPatchSite in full-codegen-ppc.cc).
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  return Assembler::IsCmpImmediate(instr);
}


//
// This code is paired with the JumpPatchSite class in full-codegen-ppc.cc
//
// Toggles an inlined smi check between its enabled form (test the low tag
// bit) and its disabled form (an always-equal register self-compare), and
// inverts the conditional branch that follows so the overall control flow
// is preserved.
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.  The delta is encoded across
  // the cmp's immediate field and register field: the register index
  // contributes in units of kOff16Mask so deltas larger than 16 bits fit.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta += Assembler::GetCmpImmediateRegister(instr).code() * kOff16Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

  if (FLAG_trace_ic) {
    LOG(isolate, PatchIC(address, cmp_instruction_address, delta));
  }

  // The patch site lies 'delta' instructions before the marker cmp.
  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  // This is patching a conditional "jump if not smi/jump if smi" site.
  // Enabling by changing from
  //   cmp cr0, rx, rx
  // to
  //  rlwinm(r0, value, 0, 31, 31, SetRC);
  //  bc(label, BT/BF, 2)
  // and vice-versa to be disabled again.
  CodePatcher patcher(isolate, patch_address, 2);
  Register reg = Assembler::GetRA(instr_at_patch);
  if (check == ENABLE_INLINED_SMI_CHECK) {
    // Disabled form is a register self-compare (RA == RB); replace it with
    // a smi-tag test that sets cr0.
    DCHECK(Assembler::IsCmpRegister(instr_at_patch));
    DCHECK_EQ(Assembler::GetRA(instr_at_patch).code(),
              Assembler::GetRB(instr_at_patch).code());
    patcher.masm()->TestIfSmi(reg, r0);
  } else {
    // Enabled form is the andi.-based smi test emitted by TestIfSmi (see
    // the IsAndi check); replace it with the always-equal self-compare.
    DCHECK(check == DISABLE_INLINED_SMI_CHECK);
    DCHECK(Assembler::IsAndi(instr_at_patch));
    patcher.masm()->cmp(reg, reg, cr0);
  }
  DCHECK(Assembler::IsBranch(branch_instr));

  // Invert the logic of the branch: the disabled self-compare always sets
  // eq, so the branch sense must flip to keep the same fall-through path.
  if (Assembler::GetCondition(branch_instr) == eq) {
    patcher.EmitCondition(ne);
  } else {
    DCHECK(Assembler::GetCondition(branch_instr) == ne);
    patcher.EmitCondition(eq);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC