// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "arm/assembler-arm.h"

#include "cpu.h"
#include "debug.h"


namespace v8 {
namespace internal {


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


int DwVfpRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}


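// Map a double register to its allocation index. The two reserved registers
// (kDoubleRegZero and kScratchDoubleReg) occupy consecutive codes, so codes
// above them shift down by kNumReservedRegisters (e.g., assuming the
// reserved pair is d14/d15, d16 maps to index 14).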
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kScratchDoubleReg));
  if (reg.code() > kDoubleRegZero.code()) {
    return reg.code() - kNumReservedRegisters;
  }
  return reg.code();
}


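// Inverse of ToAllocationIndex: indices at or above kDoubleRegZero's code
// skip over the reserved register pair.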
DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
  ASSERT(index >= 0 && index < NumAllocatableRegisters());
  ASSERT(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
         kNumReservedRegisters - 1);
  if (index >= kDoubleRegZero.code()) {
    return from_code(index + kNumReservedRegisters);
  }
  return from_code(index);
}


void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointers inside code objects move with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(Assembler::target_pointer_address_at(pc_));
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_pointer_at(pc_)));
}


Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be de-referenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
  return &reconstructed_obj_ptr_;
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


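// Number of instructions in the code age sequence; the stub address is
// stored in the word occupied by the sequence's last instruction.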
static const int kNoCodeAgeSequenceLength = 3;

Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ + Assembler::kInstrSize *
                         (kNoCodeAgeSequenceLength - 1)));
}


void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ + Assembler::kInstrSize *
                     (kNoCodeAgeSequenceLength - 1)) =
      stub->instruction_start();
}


Address RelocInfo::call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


bool RelocInfo::IsPatchedReturnSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
  // A patched return sequence is:
  //  ldr ip, [pc, #0]
  //  blx ip
  return ((current_instr & kLdrPCMask) == kLdrPCPattern)
          && ((next_instr & kBlxRegMask) == kBlxRegPattern);
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


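// A plain register operand: rm shifted by LSL #0, i.e. the register value
// itself.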
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}


bool Operand::is_reg() const {
  return rm_.is_valid() &&
         rs_.is(no_reg) &&
         shift_op_ == LSL &&
         shift_imm_ == 0;
}


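// Ensure there is room to emit the next instruction, growing the buffer if
// necessary, and emit the pending constant pool when the check point is due.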
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}


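// Write one 32-bit instruction at the current position and advance pc_.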
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}


Address Assembler::target_pointer_address_at(Address pc) {
  Address target_pc = pc;
  Instr instr = Memory::int32_at(target_pc);
  // If we have a bx instruction, the instruction before the bx is
  // what we need to patch.
  static const int32_t kBxInstMask = 0x0ffffff0;
  static const int32_t kBxInstPattern = 0x012fff10;
  if ((instr & kBxInstMask) == kBxInstPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }

  // With a blx instruction, the instruction before is what needs to be patched.
  if ((instr & kBlxRegMask) == kBlxRegPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }

  ASSERT(IsLdrPcImmediateOffset(instr));
  int offset = instr & 0xfff;  // offset_12 is unsigned
  if ((instr & (1 << 23)) == 0) offset = -offset;  // U bit defines offset sign
  // Verify that the constant pool comes after the instruction referencing it.
  ASSERT(offset >= -4);
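  // On ARM the PC reads as the address of the current instruction plus 8,
  // so the pool entry lives at target_pc + 8 + offset.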
  return target_pc + offset + 8;
}


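// Read the target pointer either from a movw/movt pair that materializes it
// directly, or from the constant pool entry referenced by an ldr.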
Address Assembler::target_pointer_at(Address pc) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* instr = Instruction::At(pc);
    Instruction* next_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (next_instr->ImmedMovwMovtValue() << 16) |
        instr->ImmedMovwMovtValue());
  }
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on V7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // Or pre-V7 or cases that need frequent patching:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                      @ return address
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr)) {
    return candidate;
  }
  candidate = pc - 3 * Assembler::kInstrSize;
  ASSERT(IsMovW(Memory::int32_at(candidate)) &&
         IsMovT(Memory::int32_at(candidate + kInstrSize)));
  return candidate;
}


Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
    return pc + kInstrSize * 2;
  } else {
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    return pc + kInstrSize * 3;
  }
}


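// During deserialization the target is written straight into the constant
// pool entry; no instruction needs patching.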
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


void Assembler::set_external_target_at(Address constant_pool_entry,
                                       Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


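// Pack a 16-bit immediate into the movw/movt encoding: the high four bits go
// into imm4 (bits 19-16) and the low twelve into imm12 (bits 11-0).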
static Instr EncodeMovwImmediate(uint32_t immediate) {
  ASSERT(immediate < 0x10000);
  return ((immediate & 0xf000) << 4) | (immediate & 0xfff);
}


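// Patch the target either by rewriting the immediates of a movw/movt pair in
// place (which requires an icache flush) or by updating the constant pool
// entry read by an ldr.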
void Assembler::set_target_pointer_at(Address pc, Address target) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    uint32_t intermediate = instr_ptr[0];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
    instr_ptr[0] = intermediate;
    intermediate = instr_ptr[1];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate >> 16);
    instr_ptr[1] = intermediate;
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    CPU::FlushICache(pc, 2 * kInstrSize);
  } else {
    ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Memory::Address_at(target_pointer_address_at(pc)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CPU::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr   ip, [pc, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  }
}


Address Assembler::target_address_at(Address pc) {
  return target_pointer_at(pc);
}


void Assembler::set_target_address_at(Address pc, Address target) {
  set_target_pointer_at(pc, target);
}


} }  // namespace v8::internal

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_