// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "src/arm/assembler-arm.h"

#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {


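// Crankshaft, the optimizing compiler, requires VFP3 support on ARM.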
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }


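// The number of usable double registers depends on whether the VFP unit
// implements the full 32 d-register bank or only the lower 16.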
int DoubleRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
  }
  // We do not use pc-relative addressing on ARM, so there is
  // nothing else to do.
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_embedded_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for embedded constant pool since this function is used
    // by the serializer and expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}


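// On ARM a target address always occupies one pointer-sized slot, whether it
// lives in a constant pool entry or in a movw/movt pair.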
int RelocInfo::target_address_size() {
  return kPointerSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


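// The code age sequence on ARM is three instructions long; the stub address
// is stored in the last word of the sequence (see the accessors below).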
static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}


Address RelocInfo::debug_call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
      target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

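// Dispatch this reloc entry to the visitor callback that matches its mode.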
template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


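// Immediate operand. The optional RelocInfo mode marks values that need
// relocation, e.g. external references or embedded objects.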
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}


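// True when the operand is a plain register, i.e. no shift is applied
// (LSL by 0 is the identity encoding).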
bool Operand::is_reg() const {
  return rm_.is_valid() &&
         rs_.is(no_reg) &&
         shift_op_ == LSL &&
         shift_imm_ == 0;
}


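// Ensure there is room for the next instruction, growing the buffer if
// needed, and give the constant pool a chance to be emitted.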
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}


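// Write a single 32-bit instruction at the current assembly position.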
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on ARMv7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // For ARMv6 when the constant pool is unavailable, it is:
  //  mov  ip, #...     @ call address low 8
  //  orr  ip, ip, #... @ call address 2nd 8
  //  orr  ip, ip, #... @ call address 3rd 8
  //  orr  ip, ip, #... @ call address high 8
  //  blx   ip
  //                      @ return address
  // In cases that need frequent patching, the address is in the
  // constant pool.  It could be a small constant pool load:
  //  ldr   ip, [pc / pp, #...] @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv7):
  //  movw  ip, #...
  //  movt  ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv6):
  //  mov  ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr) ||
      IsLdrPpImmediateOffset(candidate_instr)) {
    return candidate;
  } else {
    if (IsLdrPpRegOffset(candidate_instr)) {
      candidate -= Assembler::kInstrSize;
    }
    if (CpuFeatures::IsSupported(ARMv7)) {
      candidate -= 1 * Assembler::kInstrSize;
      DCHECK(IsMovW(Memory::int32_at(candidate)) &&
             IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
    } else {
      candidate -= 3 * Assembler::kInstrSize;
      DCHECK(
          IsMovImmed(Memory::int32_at(candidate)) &&
          IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
    }
    return candidate;
  }
}


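// Given the start of a call sequence, return the address the call returns to.
// The sequence is two to six instructions long, depending on how the target
// is materialized (see target_address_from_return_address above).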
Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) ||
      IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
    // Load from constant pool, small section.
    return pc + kInstrSize * 2;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      DCHECK(IsMovW(Memory::int32_at(pc)));
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 4;
      } else {
        // A movw / movt load immediate.
        return pc + kInstrSize * 3;
      }
    } else {
      DCHECK(IsMovImmed(Memory::int32_at(pc)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 6;
      } else {
        // A mov / orr load immediate.
        return pc + kInstrSize * 5;
      }
    }
  }
}


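// Patch a call target during deserialization. With an embedded constant pool
// the entry is patched via set_target_address_at; otherwise the constant pool
// slot holds a raw address that can be written directly.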
void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_embedded_constant_pool) {
    set_target_address_at(isolate, constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


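// Decide whether the instruction(s) at pc load their value from a constant
// pool entry rather than materializing it as an immediate: anything that is
// not a movw (ARMv7) or mov (ARMv6) immediate load is a pool load, as is a
// movw/mov prefix followed by an ldr from pp when the embedded constant pool
// is enabled.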
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_embedded_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_embedded_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}


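// Compute the address of the constant pool entry that the load at pc refers
// to, either as an offset from the constant pool pointer (embedded pool) or
// as a pc-relative ldr offset.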
Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return constant_pool + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}


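// Read the 32-bit target encoded at pc: from the constant pool entry, from a
// movw/movt pair (ARMv7), or from a mov/orr/orr/orr sequence (ARMv6).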
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, one would expect an instruction cache flush after patching
    // a target address in the code, as in:
    //   Assembler::FlushICache(isolate, pc, sizeof(target));
    // However, on ARM no instruction is actually patched for an embedded
    // constant of the form:
    //   ldr ip, [pp, #...]
    // since the instruction accessing this address in the constant pool
    // remains unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
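    // Example (illustrative value): patching in the target 0x12345678 leaves
    //   movw ip, #0x5678   @ low 16 bits
    //   movt ip, #0x1234   @ high 16 bits
    // assuming the sequence loads through ip, as in the call sequences above.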
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 2 * kInstrSize);
    }
  } else {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
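    // Example (same illustrative value 0x12345678): each of the four
    // instructions contributes one byte of the immediate:
    //   mov ip, #0x78
    //   orr ip, ip, #0x5600
    //   orr ip, ip, #0x340000
    //   orr ip, ip, #0x12000000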
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 4 * kInstrSize);
    }
  }
}


}  // namespace internal
}  // namespace v8

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_