// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "src/arm/assembler-arm.h"

#include "src/assembler.h"
#include "src/debug.h"


namespace v8 {
namespace internal {


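// Crankshaft (the optimizing compiler) requires VFP3 hardware floating point.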
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


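// With VFP32DREGS the full bank of 32 D-registers (d0-d31) is available;
// otherwise only d0-d15 exist.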
int DwVfpRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


int DwVfpRegister::NumReservedRegisters() {
  return kNumReservedRegisters;
}


int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}


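// Maps a register code to a contiguous allocation index by skipping over the
// reserved registers (kDoubleRegZero and kScratchDoubleReg), which must never
// be handed out by the register allocator.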
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  DCHECK(!reg.is(kDoubleRegZero));
  DCHECK(!reg.is(kScratchDoubleReg));
  if (reg.code() > kDoubleRegZero.code()) {
    return reg.code() - kNumReservedRegisters;
  }
  return reg.code();
}


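// Inverse of ToAllocationIndex: recovers the register by re-inserting the gap
// left by the reserved registers.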
DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
  DCHECK(index >= 0 && index < NumAllocatableRegisters());
  DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
         kNumReservedRegisters - 1);
  if (index >= kDoubleRegZero.code()) {
    return from_code(index + kNumReservedRegisters);
  }
  return from_code(index);
}


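// Called when a code object moves by delta bytes; only absolute pointers into
// the code object itself need adjusting.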
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // An absolute code pointer inside a code object moves with the code
    // object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
  }
  // We do not use PC-relative addressing on ARM, so there is nothing else to
  // do.
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_ool_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for the out-of-line constant pool since this function
    // is used by the serializer, which expects the address to reside within
    // the code object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550): We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}


Address RelocInfo::call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


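// Erases the recorded target by patching in NULL.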
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}


bool RelocInfo::IsPatchedReturnSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
  // A patched return sequence is:
  //  ldr ip, [pc, #0]
  //  blx ip
  return Assembler::IsLdrPcImmediateOffset(current_instr) &&
         Assembler::IsBlxReg(next_instr);
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


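// The constructors below cover the shapes an ARM operand can take: immediate,
// external reference, Smi, and plain register. Illustrative uses (with the
// usual "__" assembler macro):
//   __ mov(r0, Operand(42));      // immediate
//   __ add(r0, r1, Operand(r2));  // register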
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}


bool Operand::is_reg() const {
  return rm_.is_valid() &&
         rs_.is(no_reg) &&
         shift_op_ == LSL &&
         shift_imm_ == 0;
}


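// Makes sure the buffer has room for the next instruction (growing it if
// necessary) and emits a pending constant pool once the check point is
// reached.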
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}


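// Writes a single 32-bit instruction into the buffer and advances the pc.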
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // The call sequence on ARMv7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // For ARMv6 when the constant pool is unavailable, it is:
  //  mov  ip, #...     @ call address low 8
  //  orr  ip, ip, #... @ call address 2nd 8
  //  orr  ip, ip, #... @ call address 3rd 8
  //  orr  ip, ip, #... @ call address high 8
  //  blx   ip
  //                      @ return address
  // In cases that need frequent patching, the address is in the
  // constant pool.  It could be a small constant pool load:
  //  ldr   ip, [pc / pp, #...] @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv7):
  //  movw  ip, #...
  //  movt  ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv6):
  //  mov  ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr) ||
      IsLdrPpImmediateOffset(candidate_instr)) {
    return candidate;
  } else {
    if (IsLdrPpRegOffset(candidate_instr)) {
      candidate -= Assembler::kInstrSize;
    }
    if (CpuFeatures::IsSupported(ARMv7)) {
      candidate -= 1 * Assembler::kInstrSize;
      DCHECK(IsMovW(Memory::int32_at(candidate)) &&
             IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
    } else {
      candidate -= 3 * Assembler::kInstrSize;
      DCHECK(
          IsMovImmed(Memory::int32_at(candidate)) &&
          IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
    }
    return candidate;
  }
}


Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}


Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) ||
      IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
    // Load from constant pool, small section.
    return pc + kInstrSize * 2;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      DCHECK(IsMovW(Memory::int32_at(pc)));
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 4;
      } else {
        // A movw / movt load immediate.
        return pc + kInstrSize * 3;
      }
    } else {
      DCHECK(IsMovImmed(Memory::int32_at(pc)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 6;
      } else {
        // A mov / orr load immediate.
        return pc + kInstrSize * 5;
      }
    }
  }
}


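// Used during deserialization: writes either through the regular patching
// path (out-of-line constant pool) or directly into the constant pool slot.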
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_ool_constant_pool) {
    set_target_address_at(constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}


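// A pc is a constant pool load unless it starts a movw/movt (ARMv7) or
// mov/orr (ARMv6) immediate load; the extended out-of-line form still counts
// as a pool load because it ends with an ldr from pp.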
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}


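// Computes the address of the constant pool slot referenced by the load
// sequence at pc, for both the out-of-line and the in-code (pc-relative)
// pool.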
Address Assembler::constant_pool_entry_address(
    Address pc, ConstantPoolArray* constant_pool) {
  if (FLAG_enable_ool_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return reinterpret_cast<Address>(constant_pool) + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}


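// Reads the 32-bit target encoded at pc, either from the referenced constant
// pool slot or from the immediates of a movw/movt (or mov/orr) sequence.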
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}


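// Patches the 32-bit target encoded at pc: either the constant pool slot is
// updated (no instructions change, so no icache flush is needed) or the
// immediates of the load sequence are rewritten and the icache is flushed.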
void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as
    // follows:
    //   CpuFeatures::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr ip, [pp, #...]
    // since the instruction accessing this address in the constant pool
    // remains unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 2 * kInstrSize);
    }
  } else {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 4 * kInstrSize);
    }
  }
}


} }  // namespace v8::internal

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_