// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-factory.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/raw-machine-assembler.h"
#include "src/compiler/scheduler.h"

namespace v8 {
namespace internal {
namespace compiler {

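// The RawMachineAssembler builds a graph and an accompanying schedule at the
// same time: every node created through it is immediately placed into a basic
// block. The constructor wires up the graph's start node and pre-creates one
// Parameter node per parameter of the signature.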
RawMachineAssembler::RawMachineAssembler(Graph* graph,
                                         MachineSignature* machine_sig,
                                         MachineType word)
    : GraphBuilder(graph),
      schedule_(new (zone()) Schedule(zone())),
      machine_(word),
      common_(zone()),
      machine_sig_(machine_sig),
      call_descriptor_(
          Linkage::GetSimplifiedCDescriptor(graph->zone(), machine_sig)),
      parameters_(NULL),
      exit_label_(schedule()->end()),
      current_block_(schedule()->start()) {
  int param_count = static_cast<int>(parameter_count());
  Node* s = graph->NewNode(common_.Start(param_count));
  graph->SetStart(s);
  if (parameter_count() == 0) return;
  parameters_ = zone()->NewArray<Node*>(param_count);
  for (size_t i = 0; i < parameter_count(); ++i) {
    parameters_[i] =
        NewNode(common()->Parameter(static_cast<int>(i)), graph->start());
  }
}


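// Export() finalizes the schedule: it computes the special RPO block order
// the code generator expects and then detaches the schedule from the
// assembler, so the assembler must not be used afterwards. A typical flow
// (a sketch, not prescribed by this file) is: construct the assembler over a
// graph, emit nodes, call Export(), and hand graph and schedule to the
// compiler pipeline.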
Schedule* RawMachineAssembler::Export() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  Scheduler::ComputeSpecialRPO(schedule_);
  // Invalidate the assembler; the caller now owns the schedule.
  Schedule* schedule = schedule_;
  schedule_ = NULL;
  return schedule;
}


Node* RawMachineAssembler::Parameter(size_t index) {
  DCHECK(index < parameter_count());
  return parameters_[index];
}


RawMachineAssembler::Label* RawMachineAssembler::Exit() {
  exit_label_.used_ = true;
  return &exit_label_;
}


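// Emits an unconditional jump to |label| and terminates the current basic
// block; a new block must be bound before further code is emitted.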
void RawMachineAssembler::Goto(Label* label) {
  DCHECK(current_block_ != schedule()->end());
  schedule()->AddGoto(CurrentBlock(), Use(label));
  current_block_ = NULL;
}


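// Emits a two-way branch on |condition| and terminates the current basic
// block.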
void RawMachineAssembler::Branch(Node* condition, Label* true_val,
                                 Label* false_val) {
  DCHECK(current_block_ != schedule()->end());
  Node* branch = NewNode(common()->Branch(), condition);
  schedule()->AddBranch(CurrentBlock(), branch, Use(true_val), Use(false_val));
  current_block_ = NULL;
}


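// Emits a return of |value| and terminates the current basic block.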
void RawMachineAssembler::Return(Node* value) {
  schedule()->AddReturn(CurrentBlock(), value);
  current_block_ = NULL;
}


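// Emits a call through the CallFunction stub with no arguments beyond the
// receiver. The call node is added to the current block but, unlike the
// control-flow emitters above, does not terminate it.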
Node* RawMachineAssembler::CallFunctionStub0(Node* function, Node* receiver,
                                             Node* context, Node* frame_state,
                                             CallFunctionFlags flags) {
  Callable callable = CodeFactory::CallFunction(isolate(), 0, flags);
  CallDescriptor* desc = Linkage::GetStubCallDescriptor(
      callable.descriptor(), 1, CallDescriptor::kNeedsFrameState, zone());
  Node* stub_code = HeapConstant(callable.code());
  Node* call = graph()->NewNode(common()->Call(desc), stub_code, function,
                                receiver, context, frame_state);
  schedule()->AddNode(CurrentBlock(), call);
  return call;
}


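// Emits a JavaScript call of |function| that passes only the receiver.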
Node* RawMachineAssembler::CallJS0(Node* function, Node* receiver,
                                   Node* context, Node* frame_state) {
  CallDescriptor* descriptor = Linkage::GetJSCallDescriptor(1, zone());
  Node* call = graph()->NewNode(common()->Call(descriptor), function, receiver,
                                context, frame_state);
  schedule()->AddNode(CurrentBlock(), call);
  return call;
}


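// Emits a call to the runtime function |function| with a single argument.
// The call goes through the CEntry stub and passes the argument count and
// the runtime function's external reference explicitly.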
Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function,
                                        Node* arg0, Node* context,
                                        Node* frame_state) {
  CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
      function, 1, Operator::kNoProperties, zone());

  Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
  Node* ref = NewNode(
      common()->ExternalConstant(ExternalReference(function, isolate())));
  Node* arity = Int32Constant(1);

  Node* call = graph()->NewNode(common()->Call(descriptor), centry, arg0, ref,
                                arity, context, frame_state);
  schedule()->AddNode(CurrentBlock(), call);
  return call;
}


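// Binds |label| to a basic block (creating one if necessary) and makes that
// block the current insertion point. A label may be bound only once.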
void RawMachineAssembler::Bind(Label* label) {
  DCHECK(current_block_ == NULL);
  DCHECK(!label->bound_);
  label->bound_ = true;
  current_block_ = EnsureBlock(label);
}


BasicBlock* RawMachineAssembler::Use(Label* label) {
  label->used_ = true;
  return EnsureBlock(label);
}


BasicBlock* RawMachineAssembler::EnsureBlock(Label* label) {
  if (label->block_ == NULL) label->block_ = schedule()->NewBasicBlock();
  return label->block_;
}


BasicBlock* RawMachineAssembler::CurrentBlock() {
  DCHECK(current_block_);
  return current_block_;
}


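// Creates a node and immediately schedules it: Parameter nodes are placed in
// the schedule's start block, everything else in the current block.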
Node* RawMachineAssembler::MakeNode(const Operator* op, int input_count,
                                    Node** inputs) {
  DCHECK(ScheduleValid());
  DCHECK(current_block_ != NULL);
  Node* node = graph()->NewNode(op, input_count, inputs);
  BasicBlock* block = op->opcode() == IrOpcode::kParameter ? schedule()->start()
                                                           : CurrentBlock();
  schedule()->AddNode(block, node);
  return node;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8