1// Copyright (c) 1994-2006 Sun Microsystems Inc.
2// All Rights Reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions
6// are met:
7//
8// - Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10//
11// - Redistribution in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the
14// distribution.
15//
16// - Neither the name of Sun Microsystems or the names of contributors may
17// be used to endorse or promote products derived from this software without
18// specific prior written permission.
19//
20// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31// OF THE POSSIBILITY OF SUCH DAMAGE.
32
33// The original source code covered by the above license above has been modified
34// significantly by Google Inc.
35// Copyright 2012 the V8 project authors. All rights reserved.
36
37#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38#define V8_ARM_ASSEMBLER_ARM_INL_H_
39
40#include "src/arm/assembler-arm.h"
41
42#include "src/assembler.h"
43#include "src/debug/debug.h"
44
45
46namespace v8 {
47namespace internal {
48
// Crankshaft (the optimizing compiler) is always supported on ARM.
bool CpuFeatures::SupportsCrankshaft() { return true; }
50
// 128-bit SIMD is not supported by this port.
bool CpuFeatures::SupportsSimd128() { return false; }
52
53int DoubleRegister::NumRegisters() {
54  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
55}
56
57
58void RelocInfo::apply(intptr_t delta) {
59  if (RelocInfo::IsInternalReference(rmode_)) {
60    // absolute code pointer inside code object moves with the code object.
61    int32_t* p = reinterpret_cast<int32_t*>(pc_);
62    *p += delta;  // relocate entry
63  }
64  // We do not use pc relative addressing on ARM, so there is
65  // nothing else to do.
66}
67
68
// Returns the call/jump target recorded at this relocation site.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
73
74Address RelocInfo::target_address_address() {
75  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
76                              || rmode_ == EMBEDDED_OBJECT
77                              || rmode_ == EXTERNAL_REFERENCE);
78  if (FLAG_enable_embedded_constant_pool ||
79      Assembler::IsMovW(Memory::int32_at(pc_))) {
80    // We return the PC for embedded constant pool since this function is used
81    // by the serializer and expects the address to reside within the code
82    // object.
83    return reinterpret_cast<Address>(pc_);
84  } else {
85    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
86    return constant_pool_entry_address();
87  }
88}
89
90
// Returns the constant pool slot holding this entry's target.
Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}
95
96
// Targets are stored as full pointers on ARM.
int RelocInfo::target_address_size() {
  return kPointerSize;
}
100
101
102Object* RelocInfo::target_object() {
103  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
104  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
105}
106
107
108Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
109  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
110  return Handle<Object>(reinterpret_cast<Object**>(
111      Assembler::target_address_at(pc_, host_)));
112}
113
114
// Patches the embedded object pointer at this relocation site. When the
// write barrier is requested and the new target is a heap object, the
// code-to-heap pointer is reported to incremental marking and the heap's
// write-into-code recording so the GC tracks it.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}
130
131
// Reads the external (C++) address recorded at this relocation site.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}
136
137
// Internal references are stored as absolute addresses directly at pc.
Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}
142
143
// Address of the cell that stores the internal reference (pc itself).
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}
148
149
// Runtime entries are encoded like ordinary targets.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
154
155
156void RelocInfo::set_target_runtime_entry(Address target,
157                                         WriteBarrierMode write_barrier_mode,
158                                         ICacheFlushMode icache_flush_mode) {
159  DCHECK(IsRuntimeEntry(rmode_));
160  if (target_address() != target)
161    set_target_address(target, write_barrier_mode, icache_flush_mode);
162}
163
164
165Handle<Cell> RelocInfo::target_cell_handle() {
166  DCHECK(rmode_ == RelocInfo::CELL);
167  Address address = Memory::Address_at(pc_);
168  return Handle<Cell>(reinterpret_cast<Cell**>(address));
169}
170
171
172Cell* RelocInfo::target_cell() {
173  DCHECK(rmode_ == RelocInfo::CELL);
174  return Cell::FromValueAddress(Memory::Address_at(pc_));
175}
176
177
// Stores the address of |cell|'s value slot at pc and, when requested,
// notifies incremental marking of the new code-to-heap pointer.
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  // What is recorded is the address of the cell's value, not the cell itself.
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}
189
190
// Byte length of the code-age prologue sequence (three instructions).
static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;
192
193
// Not used on ARM; present only to satisfy the platform interface.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on Arm.
  return Handle<Object>();
}
198
199
// Reads the code-age stub whose entry address is embedded in the last
// instruction slot of the code-age sequence.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}
206
207
// Writes |stub|'s entry address into the last instruction slot of the
// code-age sequence. |icache_flush_mode| is unused here: only data is
// written, no instruction is modified.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}
215
216
// Reads the call target of a patched debug break slot.
Address RelocInfo::debug_call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
}
223
224
// Writes a new call target into a patched debug break slot and records the
// resulting code-to-code pointer with incremental marking.
void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
      target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
235
236
// Clears the target recorded at this site (used when wiping dead code).
// Internal references are plain pointers at pc; other modes go through the
// normal target-patching path.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}
247
// Dispatches this relocation entry to the matching callback of a dynamic
// object visitor. Debug-break slots are only visited once they have been
// patched with an actual target.
template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
270
271
// Static-dispatch twin of the ObjectVisitor overload above: same mode
// dispatch, but the callbacks are resolved at compile time.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
294
295
296Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
297  rm_ = no_reg;
298  imm32_ = immediate;
299  rmode_ = rmode;
300}
301
302
303Operand::Operand(const ExternalReference& f)  {
304  rm_ = no_reg;
305  imm32_ = reinterpret_cast<int32_t>(f.address());
306  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
307}
308
309
310Operand::Operand(Smi* value) {
311  rm_ = no_reg;
312  imm32_ =  reinterpret_cast<intptr_t>(value);
313  rmode_ = RelocInfo::NONE32;
314}
315
316
317Operand::Operand(Register rm) {
318  rm_ = rm;
319  rs_ = no_reg;
320  shift_op_ = LSL;
321  shift_imm_ = 0;
322}
323
324
325bool Operand::is_reg() const {
326  return rm_.is_valid() &&
327         rs_.is(no_reg) &&
328         shift_op_ == LSL &&
329         shift_imm_ == 0;
330}
331
332
// Ensures room for at least one more instruction, growing the buffer once
// free space shrinks to the kGap guard, and gives the constant pool a
// chance to be emitted.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}
339
340
// Writes one 32-bit instruction at the current position and advances pc_;
// checks buffer capacity first.
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
346
347
348Address Assembler::target_address_from_return_address(Address pc) {
349  // Returns the address of the call target from the return address that will
350  // be returned to after a call.
351  // Call sequence on V7 or later is:
352  //  movw  ip, #... @ call address low 16
353  //  movt  ip, #... @ call address high 16
354  //  blx   ip
355  //                      @ return address
356  // For V6 when the constant pool is unavailable, it is:
357  //  mov  ip, #...     @ call address low 8
358  //  orr  ip, ip, #... @ call address 2nd 8
359  //  orr  ip, ip, #... @ call address 3rd 8
360  //  orr  ip, ip, #... @ call address high 8
361  //  blx   ip
362  //                      @ return address
363  // In cases that need frequent patching, the address is in the
364  // constant pool.  It could be a small constant pool load:
365  //  ldr   ip, [pc / pp, #...] @ call address
366  //  blx   ip
367  //                      @ return address
368  // Or an extended constant pool load (ARMv7):
369  //  movw  ip, #...
370  //  movt  ip, #...
371  //  ldr   ip, [pc, ip]  @ call address
372  //  blx   ip
373  //                      @ return address
374  // Or an extended constant pool load (ARMv6):
375  //  mov  ip, #...
376  //  orr  ip, ip, #...
377  //  orr  ip, ip, #...
378  //  orr  ip, ip, #...
379  //  ldr   ip, [pc, ip]  @ call address
380  //  blx   ip
381  //                      @ return address
382  Address candidate = pc - 2 * Assembler::kInstrSize;
383  Instr candidate_instr(Memory::int32_at(candidate));
384  if (IsLdrPcImmediateOffset(candidate_instr) |
385      IsLdrPpImmediateOffset(candidate_instr)) {
386    return candidate;
387  } else {
388    if (IsLdrPpRegOffset(candidate_instr)) {
389      candidate -= Assembler::kInstrSize;
390    }
391    if (CpuFeatures::IsSupported(ARMv7)) {
392      candidate -= 1 * Assembler::kInstrSize;
393      DCHECK(IsMovW(Memory::int32_at(candidate)) &&
394             IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
395    } else {
396      candidate -= 3 * Assembler::kInstrSize;
397      DCHECK(
398          IsMovImmed(Memory::int32_at(candidate)) &&
399          IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
400          IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
401          IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
402    }
403    return candidate;
404  }
405}
406
407
408Address Assembler::return_address_from_call_start(Address pc) {
409  if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
410      IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
411    // Load from constant pool, small section.
412    return pc + kInstrSize * 2;
413  } else {
414    if (CpuFeatures::IsSupported(ARMv7)) {
415      DCHECK(IsMovW(Memory::int32_at(pc)));
416      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
417      if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
418        // Load from constant pool, extended section.
419        return pc + kInstrSize * 4;
420      } else {
421        // A movw / movt load immediate.
422        return pc + kInstrSize * 3;
423      }
424    } else {
425      DCHECK(IsMovImmed(Memory::int32_at(pc)));
426      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
427      DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
428      DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
429      if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
430        // Load from constant pool, extended section.
431        return pc + kInstrSize * 6;
432      } else {
433        // A mov / orr load immediate.
434        return pc + kInstrSize * 5;
435      }
436    }
437  }
438}
439
440
// Used by the deserializer to install a code target. With an embedded
// constant pool the regular patching path is reused; otherwise the
// constant pool slot is written directly.
void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_embedded_constant_pool) {
    set_target_address_at(isolate, constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}
449
450
// Used by the deserializer: internal references on ARM are absolute
// addresses stored at pc, so just write the pointer (|mode| is unused).
void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}
455
456
// Returns true if the sequence at |pc| loads its value from a constant
// pool rather than encoding it as an immediate. An immediate load starts
// with movw (ARMv7) or mov (pre-ARMv7); anything else is a pool load, as
// is an extended embedded-constant-pool lookup, which starts with the
// immediate sequence but ends in an ldr from pp.
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_embedded_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_embedded_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}
470
471
// Returns the address of the constant pool slot read by the load sequence
// at |pc|. With an embedded constant pool the pp-relative offset is decoded
// from the mov/orr chain (ARMv6), the movw/movt pair (ARMv7), or the ldr
// immediate; without one, it is the pc-relative ldr offset plus the
// pipeline delta.
Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      // Each instruction contributes one byte of the offset.
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return constant_pool + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    // pc-relative: account for the ARM pipeline's pc-read-ahead.
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}
509
510
// Reads the 32-bit target address materialized by the load sequence at
// |pc|: from the constant pool, from a movw/movt pair (ARMv7), or from a
// mov/orr/orr/orr chain (pre-ARMv7).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is an mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    // Each instruction contributes one byte of the address.
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}
540
541
// Patches the load sequence at |pc| so it materializes |target|. A constant
// pool entry is updated as data (no instruction changes, so no icache
// flush); immediate-load sequences are re-encoded in place and the icache
// flushed unless the caller opted out.
void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   Assembler::FlushICache(isolate, pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    // Verify the patched sequence still decodes as movw/movt.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 2 * kInstrSize);
    }
  } else {
    // This is an mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // One byte of the address per instruction.
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    // Verify the patched sequence still decodes as mov/orr/orr/orr.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 4 * kInstrSize);
    }
  }
}
592
593
594}  // namespace internal
595}  // namespace v8
596
597#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_
598