1// Copyright (c) 1994-2006 Sun Microsystems Inc.
2// All Rights Reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions
6// are met:
7//
8// - Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10//
11// - Redistribution in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the
14// distribution.
15//
16// - Neither the name of Sun Microsystems or the names of contributors may
17// be used to endorse or promote products derived from this software without
18// specific prior written permission.
19//
20// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31// OF THE POSSIBILITY OF SUCH DAMAGE.
32
33// The original source code covered by the above license above has been modified
34// significantly by Google Inc.
35// Copyright 2014 the V8 project authors. All rights reserved.
36
37#ifndef V8_PPC_ASSEMBLER_PPC_INL_H_
38#define V8_PPC_ASSEMBLER_PPC_INL_H_
39
40#include "src/ppc/assembler-ppc.h"
41
42#include "src/assembler.h"
43#include "src/debug/debug.h"
44
45
46namespace v8 {
47namespace internal {
48
49
// Crankshaft (the optimizing compiler) is unconditionally supported on PPC.
bool CpuFeatures::SupportsCrankshaft() { return true; }
51
52
53void RelocInfo::apply(intptr_t delta) {
54  // absolute code pointer inside code object moves with the code object.
55  if (IsInternalReference(rmode_)) {
56    // Jump table entry
57    Address target = Memory::Address_at(pc_);
58    Memory::Address_at(pc_) = target + delta;
59  } else {
60    // mov sequence
61    DCHECK(IsInternalReferenceEncoded(rmode_));
62    Address target = Assembler::target_address_at(pc_, host_);
63    Assembler::set_target_address_at(isolate_, pc_, host_, target + delta,
64                                     SKIP_ICACHE_FLUSH);
65  }
66}
67
68
69Address RelocInfo::target_internal_reference() {
70  if (IsInternalReference(rmode_)) {
71    // Jump table entry
72    return Memory::Address_at(pc_);
73  } else {
74    // mov sequence
75    DCHECK(IsInternalReferenceEncoded(rmode_));
76    return Assembler::target_address_at(pc_, host_);
77  }
78}
79
80
// Returns the address of the reference itself (i.e. pc_), not the referenced
// target; valid for both raw and encoded internal references.
Address RelocInfo::target_internal_reference_address() {
  DCHECK(IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  return reinterpret_cast<Address>(pc_);
}
85
86
// Returns the call/jump target encoded at pc_ (code target or runtime entry).
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
91
92
// Returns the address of the word holding the target.  On PPC the target bits
// are mixed into the instructions themselves, so this is always pc_.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE);

  if (FLAG_enable_embedded_constant_pool &&
      Assembler::IsConstantPoolLoadStart(pc_)) {
    // We return the PC for embedded constant pool since this function is used
    // by the serializer and expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  }

  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LIS/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written.
  return reinterpret_cast<Address>(pc_);
}
116
117
// Returns the address of the constant pool slot holding the target.  Only
// valid when the embedded constant pool is enabled and pc_ starts a pool
// load sequence; anything else is a caller error.
Address RelocInfo::constant_pool_entry_address() {
  if (FLAG_enable_embedded_constant_pool) {
    Address constant_pool = host_->constant_pool();
    DCHECK(constant_pool);
    ConstantPoolEntry::Access access;
    if (Assembler::IsConstantPoolLoadStart(pc_, &access))
      return Assembler::target_constant_pool_address_at(
          pc_, constant_pool, access, ConstantPoolEntry::INTPTR);
  }
  UNREACHABLE();
  return NULL;
}
130
131
// Size in bytes of the serialized target; on PPC the target lives inside the
// instruction encoding, so this is Assembler::kSpecialTargetSize.
int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }
133
134
// Patches the target at pc_ and, for code targets, records the write with the
// incremental marker so the GC sees the new code reference.
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
148
149
150Address Assembler::target_address_from_return_address(Address pc) {
151// Returns the address of the call target from the return address that will
152// be returned to after a call.
153// Call sequence is :
154//  mov   ip, @ call address
155//  mtlr  ip
156//  blrl
157//                      @ return address
158  int len;
159  ConstantPoolEntry::Access access;
160  if (FLAG_enable_embedded_constant_pool &&
161      IsConstantPoolLoadEnd(pc - 3 * kInstrSize, &access)) {
162    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
163  } else {
164    len = kMovInstructionsNoConstantPool;
165  }
166  return pc - (len + 2) * kInstrSize;
167}
168
169
170Address Assembler::return_address_from_call_start(Address pc) {
171  int len;
172  ConstantPoolEntry::Access access;
173  if (FLAG_enable_embedded_constant_pool &&
174      IsConstantPoolLoadStart(pc, &access)) {
175    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
176  } else {
177    len = kMovInstructionsNoConstantPool;
178  }
179  return pc + (len + 2) * kInstrSize;
180}
181
182
// Returns the heap object embedded in the mov sequence at pc_.
Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}
187
188
// Returns the embedded object as a handle.  During code generation the
// "target address" slot holds a handle location rather than a live object,
// hence the cast to Object**.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(
      reinterpret_cast<Object**>(Assembler::target_address_at(pc_, host_)));
}
194
195
// Patches the embedded object at pc_ and notifies the incremental marker so
// the write into code is tracked for GC.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
209
210
// Returns the external (C++) address encoded at pc_.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}
215
216
// Returns the runtime entry point encoded at pc_.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
221
222
223void RelocInfo::set_target_runtime_entry(Address target,
224                                         WriteBarrierMode write_barrier_mode,
225                                         ICacheFlushMode icache_flush_mode) {
226  DCHECK(IsRuntimeEntry(rmode_));
227  if (target_address() != target)
228    set_target_address(target, write_barrier_mode, icache_flush_mode);
229}
230
231
// Returns a handle to the Cell referenced by the pointer stored at pc_.
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}
237
238
// Returns the Cell whose value address is stored at pc_.
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
243
244
// Stores the address of cell's value slot at pc_ and records the write with
// the incremental marker.
void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(host(), NULL, cell);
  }
}
256
257
// Code-aging sequence geometry.  The young (no-age) prologue and the aged
// (stub-calling) prologue must occupy the same number of instructions, so the
// shorter one is padded with nops.
static const int kNoCodeAgeInstructions =
    FLAG_enable_embedded_constant_pool ? 7 : 6;
static const int kCodeAgingInstructions =
    Assembler::kMovInstructionsNoConstantPool + 3;
// Common sequence length: the max of the two variants.
static const int kNoCodeAgeSequenceInstructions =
    ((kNoCodeAgeInstructions >= kCodeAgingInstructions)
         ? kNoCodeAgeInstructions
         : kCodeAgingInstructions);
static const int kNoCodeAgeSequenceNops =
    (kNoCodeAgeSequenceInstructions - kNoCodeAgeInstructions);
static const int kCodeAgingSequenceNops =
    (kNoCodeAgeSequenceInstructions - kCodeAgingInstructions);
// Offset of the aging-stub target within the aged sequence.
static const int kCodeAgingTargetDelta = 1 * Assembler::kInstrSize;
static const int kNoCodeAgeSequenceLength =
    (kNoCodeAgeSequenceInstructions * Assembler::kInstrSize);
273
274
// Not used on PPC; code-age stubs are accessed via code_age_stub() instead.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on PPC.
  return Handle<Object>();
}
279
280
// Returns the aging stub whose address is encoded kCodeAgingTargetDelta bytes
// into the code-age sequence.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + kCodeAgingTargetDelta, host_));
}
286
287
// Patches the code-age sequence to call the given aging stub.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + kCodeAgingTargetDelta, host_,
                                   stub->instruction_start(),
                                   icache_flush_mode);
}
295
296
// Returns the debug-break call target patched into the slot at pc_.
Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}
301
302
// Patches the debug-break slot at pc_ to call target and records the code
// write with the incremental marker.
void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
312
313
// Clears the target at pc_ (sets it to NULL), choosing the patching strategy
// that matches how the target is stored for this relocation mode.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    // Jump table entry
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    // mov sequence
    // Currently used only by deserializer, no need to flush.
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL,
                                     SKIP_ICACHE_FLUSH);
  } else {
    // All other modes: rewrite the mov sequence and flush the icache.
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}
330
331
// Recognizes the patched return sequence installed by
// BreakLocation::SetDebugBreakAtReturn() (FIXED_SEQUENCE): a lis/ori
// (plus oris/ori on 64-bit) mov of the debug-break target, terminated by a
// "twge r2, r2" trap instruction.
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
#if V8_TARGET_ARCH_PPC64
  Instr instr3 = Assembler::instr_at(pc_ + (3 * Assembler::kInstrSize));
  Instr instr4 = Assembler::instr_at(pc_ + (4 * Assembler::kInstrSize));
  Instr binstr = Assembler::instr_at(pc_ + (7 * Assembler::kInstrSize));
#else
  Instr binstr = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize);
#endif
  bool patched_return =
      ((instr0 & kOpcodeMask) == ADDIS && (instr1 & kOpcodeMask) == ORI &&
#if V8_TARGET_ARCH_PPC64
       (instr3 & kOpcodeMask) == ORIS && (instr4 & kOpcodeMask) == ORI &&
#endif
       (binstr == 0x7d821008));  // twge r2, r2

  return patched_return;
}
357
358
// A debug-break slot starts as a DEBUG_BREAK_NOP; any other first instruction
// means the slot has been patched with a debug-break call.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
363
364
// Dispatches this relocation entry to the matching ObjectVisitor callback.
// The branch order mirrors the relative frequency of the modes; only patched
// debug-break slots are reported as debug targets.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
387
388
// Static-visitor variant of Visit(); identical dispatch, but callbacks are
// resolved at compile time via StaticVisitor.
template <typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
412
413Operand::Operand(intptr_t immediate, RelocInfo::Mode rmode) {
414  rm_ = no_reg;
415  imm_ = immediate;
416  rmode_ = rmode;
417}
418
419Operand::Operand(const ExternalReference& f) {
420  rm_ = no_reg;
421  imm_ = reinterpret_cast<intptr_t>(f.address());
422  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
423}
424
425Operand::Operand(Smi* value) {
426  rm_ = no_reg;
427  imm_ = reinterpret_cast<intptr_t>(value);
428  rmode_ = kRelocInfo_NONEPTR;
429}
430
431Operand::Operand(Register rm) {
432  rm_ = rm;
433  rmode_ = kRelocInfo_NONEPTR;  // PPC -why doesn't ARM do this?
434}
435
// Grows the instruction buffer when remaining space drops to the kGap
// reserve, guaranteeing the next emit() always has room.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}
441
442void Assembler::TrackBranch() {
443  DCHECK(!trampoline_emitted_);
444  int count = tracked_branch_count_++;
445  if (count == 0) {
446    // We leave space (kMaxBlockTrampolineSectionSize)
447    // for BlockTrampolinePoolScope buffer.
448    next_trampoline_check_ =
449        pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
450  } else {
451    next_trampoline_check_ -= kTrampolineSlotsSize;
452  }
453}
454
455void Assembler::UntrackBranch() {
456  DCHECK(!trampoline_emitted_);
457  DCHECK(tracked_branch_count_ > 0);
458  int count = --tracked_branch_count_;
459  if (count == 0) {
460    // Reset
461    next_trampoline_check_ = kMaxInt;
462  } else {
463    next_trampoline_check_ += kTrampolineSlotsSize;
464  }
465}
466
// Cheap inline check run after every emit(); falls through to the full
// trampoline-pool emission only when the scheduled offset is reached.
void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_trampoline_check_) {
    CheckTrampolinePool();
  }
}
472
// Appends one instruction word to the buffer, growing the buffer first if
// needed and checking trampoline-pool pressure afterwards.
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}
479
// True when this operand denotes a register rather than an immediate.
bool Operand::is_reg() const { return rm_.is_valid(); }
481
482
// Fetch the 32bit value from the FIXED_SEQUENCE lis/ori
// (or the 64-bit value from the 5-instruction lis/ori/sldi/oris/ori form,
// or from the constant pool when it holds the target).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access))
      return Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR));
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret 2 instructions generated by lis/ori
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    // Instruction at index 2 is the shift (sldi); the remaining halfwords
    // live in instructions 3 and 4.
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    // Assemble the 64 bit value.
    uint64_t hi = (static_cast<uint32_t>((instr1 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr2 & kImm16Mask));
    uint64_t lo = (static_cast<uint32_t>((instr4 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr5 & kImm16Mask));
    return reinterpret_cast<Address>((hi << 32) | lo);
#else
    // Assemble the 32 bit value.
    return reinterpret_cast<Address>(((instr1 & kImm16Mask) << 16) |
                                     (instr2 & kImm16Mask));
#endif
  }

  UNREACHABLE();
  return NULL;
}
515
516
// Opcode of an intptr-sized load: ld on 64-bit PPC, lwz on 32-bit.
#if V8_TARGET_ARCH_PPC64
const int kLoadIntptrOpcode = LD;
#else
const int kLoadIntptrOpcode = LWZ;
#endif
522
// Constant pool load sequence detection:
// 1) REGULAR access:
//    load <dst>, kConstantPoolRegister + <offset>
//
// 2) OVERFLOWED access:
//    addis <scratch>, kConstantPoolRegister, <offset_high>
//    load <dst>, <scratch> + <offset_low>
//
// Returns true iff pc begins one of the sequences above; when it does and
// access is non-NULL, reports which form was found.
bool Assembler::IsConstantPoolLoadStart(Address pc,
                                        ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  int opcode = instr & kOpcodeMask;
  // Both forms address off kConstantPoolRegister in their first instruction.
  if (!GetRA(instr).is(kConstantPoolRegister)) return false;
  bool overflowed = (opcode == ADDIS);
#ifdef DEBUG
  if (overflowed) {
    // The overflowed form's actual load is the following instruction.
    opcode = instr_at(pc + kInstrSize) & kOpcodeMask;
  }
  DCHECK(opcode == kLoadIntptrOpcode || opcode == LFD);
#endif
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}
548
549
// Returns true iff pc is the final (load) instruction of a constant pool
// access sequence; when it is and access is non-NULL, reports which form.
// The overflowed form is recognized by backtracking one instruction to the
// addis that set up the scratch register.
bool Assembler::IsConstantPoolLoadEnd(Address pc,
                                      ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  int opcode = instr & kOpcodeMask;
  bool overflowed = false;
  if (!(opcode == kLoadIntptrOpcode || opcode == LFD)) return false;
  if (!GetRA(instr).is(kConstantPoolRegister)) {
    // Not a direct pool load; check for the addis of the overflowed form.
    instr = instr_at(pc - kInstrSize);
    opcode = instr & kOpcodeMask;
    if ((opcode != ADDIS) || !GetRA(instr).is(kConstantPoolRegister)) {
      return false;
    }
    overflowed = true;
  }
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}
570
571
// Extracts the signed pool offset encoded by the load sequence at pc.
// REGULAR: the 16-bit displacement of the load.  OVERFLOWED: the addis
// high halfword combined with the sign-extended low halfword of the load.
int Assembler::GetConstantPoolOffset(Address pc,
                                     ConstantPoolEntry::Access access,
                                     ConstantPoolEntry::Type type) {
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  int offset;
  if (overflowed) {
    offset = (instr_at(pc) & kImm16Mask) << 16;
    offset += SIGN_EXT_IMM16(instr_at(pc + kInstrSize) & kImm16Mask);
    // An overflowed entry must not fit in 16 bits, or it would be REGULAR.
    DCHECK(!is_int16(offset));
  } else {
    offset = SIGN_EXT_IMM16((instr_at(pc) & kImm16Mask));
  }
  return offset;
}
592
593
// Rewrites the immediate field(s) of the pool access sequence at pc_offset to
// reference the given pool offset.  The access form must match the offset
// magnitude: OVERFLOWED iff the offset does not fit in 16 bits.
void Assembler::PatchConstantPoolAccessInstruction(
    int pc_offset, int offset, ConstantPoolEntry::Access access,
    ConstantPoolEntry::Type type) {
  Address pc = buffer_ + pc_offset;
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
  CHECK(overflowed != is_int16(offset));
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  if (overflowed) {
    int hi_word = static_cast<int>(offset >> 16);
    int lo_word = static_cast<int>(offset & 0xffff);
    // The load's 16-bit displacement is sign-extended at runtime; compensate
    // in the high halfword when the low halfword will read as negative.
    if (lo_word & 0x8000) hi_word++;

    Instr instr1 = instr_at(pc);
    Instr instr2 = instr_at(pc + kInstrSize);
    instr1 &= ~kImm16Mask;
    instr1 |= (hi_word & kImm16Mask);
    instr2 &= ~kImm16Mask;
    instr2 |= (lo_word & kImm16Mask);
    instr_at_put(pc, instr1);
    instr_at_put(pc + kInstrSize, instr2);
  } else {
    Instr instr = instr_at(pc);
    instr &= ~kImm16Mask;
    instr |= (offset & kImm16Mask);
    instr_at_put(pc, instr);
  }
}
626
627
628Address Assembler::target_constant_pool_address_at(
629    Address pc, Address constant_pool, ConstantPoolEntry::Access access,
630    ConstantPoolEntry::Type type) {
631  Address addr = constant_pool;
632  DCHECK(addr);
633  addr += GetConstantPoolOffset(pc, access, type);
634  return addr;
635}
636
637
// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code.  The serializer
// has already deserialized the mov instructions etc.
// There is a FIXED_SEQUENCE assumption here
void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address instruction_payload, Code* code, Address target) {
  set_target_address_at(isolate, instruction_payload, code, target);
}
646
647
648void Assembler::deserialization_set_target_internal_reference_at(
649    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
650  if (RelocInfo::IsInternalReferenceEncoded(mode)) {
651    Code* code = NULL;
652    set_target_address_at(isolate, pc, code, target, SKIP_ICACHE_FLUSH);
653  } else {
654    Memory::Address_at(pc) = target;
655  }
656}
657
658
// This code assumes the FIXED_SEQUENCE of lis/ori
// Patches the target encoded at pc: either by writing the constant pool slot
// (when the pool is in use and pc starts a pool load), or by rewriting the
// immediate halfwords of the lis/ori (and, on 64-bit, oris/ori) sequence,
// flushing the icache afterwards unless told to skip.
void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access)) {
      // Pool-based target: update the pool slot; the instructions themselves
      // are unchanged, so no icache flush is required.
      Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR)) = target;
      return;
    }
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret 2 instructions generated by lis/ori
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    // Instruction at index 2 is the shift; indices 3 and 4 carry the low
    // halfwords.  Needs to be fixed up when mov changes to handle 64-bit
    // values.
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uintptr_t itarget = reinterpret_cast<uintptr_t>(target);

    // Distribute the 64-bit target over the four 16-bit immediates,
    // lowest halfword first.
    instr5 &= ~kImm16Mask;
    instr5 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr4 &= ~kImm16Mask;
    instr4 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr2 &= ~kImm16Mask;
    instr2 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr1 &= ~kImm16Mask;
    instr1 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    *p = instr1;
    *(p + 1) = instr2;
    *(p + 3) = instr4;
    *(p + 4) = instr5;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, p, 5 * kInstrSize);
    }
#else
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uint32_t itarget = reinterpret_cast<uint32_t>(target);
    int lo_word = itarget & kImm16Mask;
    int hi_word = itarget >> 16;
    instr1 &= ~kImm16Mask;
    instr1 |= hi_word;
    instr2 &= ~kImm16Mask;
    instr2 |= lo_word;

    *p = instr1;
    *(p + 1) = instr2;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, p, 2 * kInstrSize);
    }
#endif
    return;
  }
  UNREACHABLE();
}
726}  // namespace internal
727}  // namespace v8
728
729#endif  // V8_PPC_ASSEMBLER_PPC_INL_H_
730