// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#include "src/interpreter/bytecode-array-writer.h"
6
7#include "src/api.h"
8#include "src/interpreter/bytecode-label.h"
9#include "src/interpreter/bytecode-register.h"
10#include "src/interpreter/constant-array-builder.h"
11#include "src/log.h"
12#include "src/objects-inl.h"
13
14namespace v8 {
15namespace internal {
16namespace interpreter {
17
18STATIC_CONST_MEMBER_DEFINITION const size_t
19    BytecodeArrayWriter::kMaxSizeOfPackedBytecode;
20
// Constructs a writer that accumulates bytecodes into a zone-allocated
// buffer. |constant_array_builder| is stored but not owned (the destructor
// releases nothing); it supplies the constant pool when the final
// BytecodeArray is built. |source_position_mode| controls whether source
// positions are recorded by the embedded source position table builder.
BytecodeArrayWriter::BytecodeArrayWriter(
    Zone* zone, ConstantArrayBuilder* constant_array_builder,
    SourcePositionTableBuilder::RecordingMode source_position_mode)
    : bytecodes_(zone),
      unbound_jumps_(0),
      source_position_table_builder_(zone, source_position_mode),
      constant_array_builder_(constant_array_builder) {
  bytecodes_.reserve(512);  // Derived via experimentation.
}
30
31// override
32BytecodeArrayWriter::~BytecodeArrayWriter() {}
33
34// override
35Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
36    Isolate* isolate, int register_count, int parameter_count,
37    Handle<FixedArray> handler_table) {
38  DCHECK_EQ(0, unbound_jumps_);
39
40  int bytecode_size = static_cast<int>(bytecodes()->size());
41  int frame_size = register_count * kPointerSize;
42  Handle<FixedArray> constant_pool =
43      constant_array_builder()->ToFixedArray(isolate);
44  Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
45      bytecode_size, &bytecodes()->front(), frame_size, parameter_count,
46      constant_pool);
47  bytecode_array->set_handler_table(*handler_table);
48  Handle<ByteArray> source_position_table =
49      source_position_table_builder()->ToSourcePositionTable(
50          isolate, Handle<AbstractCode>::cast(bytecode_array));
51  bytecode_array->set_source_position_table(*source_position_table);
52  return bytecode_array;
53}
54
// override
// Emits a non-jump bytecode node to the stream.
void BytecodeArrayWriter::Write(BytecodeNode* node) {
  DCHECK(!Bytecodes::IsJump(node->bytecode()));
  // Record the source position first: UpdateSourcePositionTable() keys the
  // position on the current stream size, which is the offset the bytecode
  // is about to be emitted at.
  UpdateSourcePositionTable(node);
  EmitBytecode(node);
}
61
// override
// Emits a jump bytecode node whose target is described by |label|.
void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsJump(node->bytecode()));
  // As in Write(), the source position must be recorded at the pre-emission
  // offset. EmitJump() then fills in or reserves the jump operand.
  UpdateSourcePositionTable(node);
  EmitJump(node, label);
}
68
// override
// Binds |label| to the current end of the bytecode stream. If a forward
// jump was already emitted against this label, that jump is patched to
// target the current offset.
void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) {
  size_t current_offset = bytecodes()->size();
  if (label->is_forward_target()) {
    // An earlier jump instruction refers to this label. Update its location.
    PatchJump(current_offset, label->offset());
    // Now treat as if the label will only be back referred to.
  }
  label->bind_to(current_offset);
}
79
// override
// Binds |label| to the same offset as the already-bound |target| label,
// patching any forward jump that referred to |label|.
void BytecodeArrayWriter::BindLabel(const BytecodeLabel& target,
                                    BytecodeLabel* label) {
  DCHECK(!label->is_bound());
  DCHECK(target.is_bound());
  if (label->is_forward_target()) {
    // An earlier jump instruction refers to this label. Update its location.
    PatchJump(target.offset(), label->offset());
    // Now treat as if the label will only be back referred to.
  }
  label->bind_to(target.offset());
}
92
93void BytecodeArrayWriter::UpdateSourcePositionTable(
94    const BytecodeNode* const node) {
95  int bytecode_offset = static_cast<int>(bytecodes()->size());
96  const BytecodeSourceInfo& source_info = node->source_info();
97  if (source_info.is_valid()) {
98    source_position_table_builder()->AddPosition(
99        bytecode_offset, SourcePosition(source_info.source_position()),
100        source_info.is_statement());
101  }
102}
103
// Appends |node|'s bytecode (preceded by an operand-scaling prefix bytecode
// when the operands are wider than the default) and its operands to the
// bytecode stream. Multi-byte operands are copied byte-by-byte in host byte
// order.
void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
  DCHECK_NE(node->bytecode(), Bytecode::kIllegal);

  Bytecode bytecode = node->bytecode();
  OperandScale operand_scale = node->operand_scale();

  // Non-default operand widths are signalled by a scaling prefix bytecode
  // emitted immediately before the bytecode itself.
  if (operand_scale != OperandScale::kSingle) {
    Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
    bytecodes()->push_back(Bytecodes::ToByte(prefix));
  }
  bytecodes()->push_back(Bytecodes::ToByte(bytecode));

  // Emit each operand with the width dictated by the bytecode and the
  // operand scale.
  const uint32_t* const operands = node->operands();
  const int operand_count = node->operand_count();
  const OperandSize* operand_sizes =
      Bytecodes::GetOperandSizes(bytecode, operand_scale);
  for (int i = 0; i < operand_count; ++i) {
    switch (operand_sizes[i]) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
        break;
      case OperandSize::kShort: {
        // Truncate to 16 bits, then write the two bytes in memory order.
        uint16_t operand = static_cast<uint16_t>(operands[i]);
        const uint8_t* raw_operand = reinterpret_cast<const uint8_t*>(&operand);
        bytecodes()->push_back(raw_operand[0]);
        bytecodes()->push_back(raw_operand[1]);
        break;
      }
      case OperandSize::kQuad: {
        // Write all four bytes of the 32-bit operand in memory order.
        const uint8_t* raw_operand =
            reinterpret_cast<const uint8_t*>(&operands[i]);
        bytecodes()->push_back(raw_operand[0]);
        bytecodes()->push_back(raw_operand[1]);
        bytecodes()->push_back(raw_operand[2]);
        bytecodes()->push_back(raw_operand[3]);
        break;
      }
    }
  }
}
147
148// static
149Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) {
150  switch (jump_bytecode) {
151    case Bytecode::kJump:
152      return Bytecode::kJumpConstant;
153    case Bytecode::kJumpIfTrue:
154      return Bytecode::kJumpIfTrueConstant;
155    case Bytecode::kJumpIfFalse:
156      return Bytecode::kJumpIfFalseConstant;
157    case Bytecode::kJumpIfToBooleanTrue:
158      return Bytecode::kJumpIfToBooleanTrueConstant;
159    case Bytecode::kJumpIfToBooleanFalse:
160      return Bytecode::kJumpIfToBooleanFalseConstant;
161    case Bytecode::kJumpIfNotHole:
162      return Bytecode::kJumpIfNotHoleConstant;
163    case Bytecode::kJumpIfNull:
164      return Bytecode::kJumpIfNullConstant;
165    case Bytecode::kJumpIfUndefined:
166      return Bytecode::kJumpIfUndefinedConstant;
167    case Bytecode::kJumpIfJSReceiver:
168      return Bytecode::kJumpIfJSReceiverConstant;
169    default:
170      UNREACHABLE();
171      return Bytecode::kIllegal;
172  }
173}
174
// Back-patches a forward jump emitted with an 8-bit placeholder operand.
// |delta| is the distance from the jump bytecode to its target. If |delta|
// fits a UImm8 operand it is written in place and the constant pool
// reservation is discarded; otherwise the delta is committed to the reserved
// constant pool slot and the jump is rewritten to its constant-pool variant.
void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
                                                   int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsForwardJump(jump_bytecode));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  DCHECK_EQ(Bytecodes::GetOperandType(jump_bytecode, 0), OperandType::kUImm);
  DCHECK_GT(delta, 0);
  size_t operand_location = jump_location + 1;
  DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
  if (Bytecodes::ScaleForUnsignedOperand(delta) == OperandScale::kSingle) {
    // The jump fits within the range of an UImm8 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
  } else {
    // The jump does not fit within the range of an UImm8 operand, so
    // commit reservation putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kByte, Smi::FromInt(delta));
    // The reserved slot guarantees the constant pool index itself fits in
    // one byte.
    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
              OperandSize::kByte);
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(entry);
  }
}
202
// Back-patches a forward jump emitted with a 16-bit placeholder operand.
// Either writes |delta| directly (discarding the constant pool reservation)
// or, if |delta| needs more than 16 bits, commits it to the constant pool
// and rewrites the jump to its constant-pool variant.
void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
                                                    int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsForwardJump(jump_bytecode));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  DCHECK_EQ(Bytecodes::GetOperandType(jump_bytecode, 0), OperandType::kUImm);
  DCHECK_GT(delta, 0);
  size_t operand_location = jump_location + 1;
  uint8_t operand_bytes[2];
  if (Bytecodes::ScaleForUnsignedOperand(delta) <= OperandScale::kDouble) {
    // The jump fits within the range of an UImm16 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
    WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
  } else {
    // The jump does not fit within the range of an UImm16 operand, so
    // commit reservation putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kShort, Smi::FromInt(delta));
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
  }
  // Both operand bytes must still hold the placeholder written when the
  // jump was emitted.
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location) = operand_bytes[1];
}
232
233void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location,
234                                                    int delta) {
235  DCHECK(Bytecodes::IsJumpImmediate(
236      Bytecodes::FromByte(bytecodes()->at(jump_location))));
237  constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
238  uint8_t operand_bytes[4];
239  WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta));
240  size_t operand_location = jump_location + 1;
241  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
242         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder &&
243         bytecodes()->at(operand_location + 2) == k8BitJumpPlaceholder &&
244         bytecodes()->at(operand_location + 3) == k8BitJumpPlaceholder);
245  bytecodes()->at(operand_location++) = operand_bytes[0];
246  bytecodes()->at(operand_location++) = operand_bytes[1];
247  bytecodes()->at(operand_location++) = operand_bytes[2];
248  bytecodes()->at(operand_location) = operand_bytes[3];
249}
250
// Patches the forward jump at |jump_location| now that its target offset
// |jump_target| is known. Accounts for an optional operand-scaling prefix
// bytecode preceding the jump, then dispatches on the operand width that
// was reserved when the jump was emitted.
void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  int delta = static_cast<int>(jump_target - jump_location);
  int prefix_offset = 0;
  OperandScale operand_scale = OperandScale::kSingle;
  if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) {
    // If a prefix scaling bytecode is emitted the target offset is one
    // less than the case of no prefix scaling bytecode.
    delta -= 1;
    prefix_offset = 1;
    operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode);
    jump_bytecode =
        Bytecodes::FromByte(bytecodes()->at(jump_location + prefix_offset));
  }

  DCHECK(Bytecodes::IsJump(jump_bytecode));
  switch (operand_scale) {
    case OperandScale::kSingle:
      // A prefix always implies a wider scale, so prefix_offset is zero here.
      PatchJumpWith8BitOperand(jump_location, delta);
      break;
    case OperandScale::kDouble:
      PatchJumpWith16BitOperand(jump_location + prefix_offset, delta);
      break;
    case OperandScale::kQuadruple:
      PatchJumpWith32BitOperand(jump_location + prefix_offset, delta);
      break;
    default:
      UNREACHABLE();
  }
  unbound_jumps_--;
}
282
// Emits the jump bytecode in |node|, resolving the jump operand against
// |label|. Backward jumps (label already bound) are emitted with their final
// delta; forward jumps are emitted with a placeholder operand and a constant
// pool reservation, to be resolved later by PatchJump().
void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsJump(node->bytecode()));
  DCHECK_EQ(0u, node->operand(0));

  size_t current_offset = bytecodes()->size();

  if (label->is_bound()) {
    CHECK_GE(current_offset, label->offset());
    CHECK_LE(current_offset, static_cast<size_t>(kMaxUInt32));
    // Label has been bound already so this is a backwards jump.
    uint32_t delta = static_cast<uint32_t>(current_offset - label->offset());
    OperandScale operand_scale = Bytecodes::ScaleForUnsignedOperand(delta);
    if (operand_scale > OperandScale::kSingle) {
      // Adjust for scaling byte prefix for wide jump offset.
      delta += 1;
    }
    // Only JumpLoop jumps backwards in this bytecode set.
    DCHECK_EQ(Bytecode::kJumpLoop, node->bytecode());
    node->update_operand0(delta);
  } else {
    // The label has not yet been bound so this is a forward reference
    // that will be patched when the label is bound. We create a
    // reservation in the constant pool so the jump can be patched
    // when the label is bound. The reservation means the maximum size
    // of the operand for the constant is known and the jump can
    // be emitted into the bytecode stream with space for the operand.
    unbound_jumps_++;
    label->set_referrer(current_offset);
    OperandSize reserved_operand_size =
        constant_array_builder()->CreateReservedEntry();
    DCHECK_NE(Bytecode::kJumpLoop, node->bytecode());
    // Emit a placeholder of the reserved width; PatchJump() asserts these
    // placeholder bytes are intact before overwriting them.
    switch (reserved_operand_size) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        node->update_operand0(k8BitJumpPlaceholder);
        break;
      case OperandSize::kShort:
        node->update_operand0(k16BitJumpPlaceholder);
        break;
      case OperandSize::kQuad:
        node->update_operand0(k32BitJumpPlaceholder);
        break;
    }
  }
  EmitBytecode(node);
}
330
331}  // namespace interpreter
332}  // namespace internal
333}  // namespace v8
334