// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/gap-resolver.h"

#include <algorithm>
#include <set>

namespace v8 {
namespace internal {
namespace compiler {

typedef ZoneList<MoveOperands>::iterator op_iterator;

#ifdef ENABLE_SLOW_DCHECKS
// TODO(svenpanne) Brush up InstructionOperand with comparison?
struct InstructionOperandComparator {
  bool operator()(const InstructionOperand* x,
                  const InstructionOperand* y) const {
    return (x->kind() < y->kind()) ||
           (x->kind() == y->kind() && x->index() < y->index());
  }
};
#endif

// No operand should be the destination for more than one move.
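// For example, {A <- B, C <- B} is fine (a shared source), but in
// {C <- A, C <- B} the operand C is written twice and the final value would
// depend on the order in which the moves are performed.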
static void VerifyMovesAreInjective(ZoneList<MoveOperands>* moves) {
#ifdef ENABLE_SLOW_DCHECKS
  std::set<InstructionOperand*, InstructionOperandComparator> seen;
  for (op_iterator i = moves->begin(); i != moves->end(); ++i) {
    SLOW_DCHECK(seen.find(i->destination()) == seen.end());
    seen.insert(i->destination());
  }
#endif
}


void GapResolver::Resolve(ParallelMove* parallel_move) const {
  ZoneList<MoveOperands>* moves = parallel_move->move_operands();
  // TODO(svenpanne) Use the member version of remove_if when we use real lists.
  op_iterator end =
      std::remove_if(moves->begin(), moves->end(),
                     [](MoveOperands& move) { return move.IsRedundant(); });
  moves->Rewind(static_cast<int>(end - moves->begin()));

  VerifyMovesAreInjective(moves);

  for (op_iterator move = moves->begin(); move != moves->end(); ++move) {
    if (!move->IsEliminated()) PerformMove(moves, &*move);
  }
}


void GapResolver::PerformMove(ZoneList<MoveOperands>* moves,
                              MoveOperands* move) const {
  // Each call to this function performs a move and deletes it from the move
  // graph.  We first recursively perform any move blocking this one.  We mark a
  // move as "pending" on entry to PerformMove in order to detect cycles in the
  // move graph.  We use operand swaps to resolve cycles, which means that a
  // call to PerformMove could change any source operand in the move graph.
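  //
  // For example (an illustrative sketch, register names chosen freely), the
  // parallel move {r0 <- r1, r1 <- r0} is a cycle: neither move can be
  // assembled first without clobbering the other's source.  PerformMove marks
  // the first move pending, recurses into the second, finds it blocked on the
  // pending move, and resolves the cycle with a single AssembleSwap.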
  DCHECK(!move->IsPending());
  DCHECK(!move->IsRedundant());

  // Clear this move's destination to indicate a pending move.  The actual
  // destination is saved on the side.
  DCHECK_NOT_NULL(move->source());  // Or else it will look eliminated.
  InstructionOperand* destination = move->destination();
  move->set_destination(NULL);

  // Perform a depth-first traversal of the move graph to resolve dependencies.
  // Any unperformed, non-pending move with a source the same as this one's
  // destination blocks this one, so recursively perform all such moves.
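  // For example, if this move is A <- B, then a move C <- A must be performed
  // first: it still reads A, which this move is about to overwrite.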
  for (op_iterator other = moves->begin(); other != moves->end(); ++other) {
    if (other->Blocks(destination) && !other->IsPending()) {
      // Though PerformMove can change any source operand in the move graph,
      // this call cannot create a blocking move via a swap (this loop does not
      // miss any).  Suppose there is a non-blocking move with source A, this
      // move is blocked on source B, and A and B are swapped.  Then A and B
      // must be involved in the same cycle (or they would not be swapped).
      // Since this move's destination is B and there is only a single incoming
      // edge to an operand, this move must also be involved in the same cycle.
      // In that case, the blocking move will be created but will be "pending"
      // when we return from PerformMove.
      PerformMove(moves, other);
    }
  }

  // We are about to resolve this move and don't need it marked as pending, so
  // restore its destination.
  move->set_destination(destination);

  // This move's source may have changed due to swaps performed to resolve
  // cycles, so it may now be the last move in a cycle.  If so, remove it.
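  // For example, in the cycle {A <- B, B <- A}, the recursion above resolves
  // the second move with a swap and rewrites this move's source from B to A,
  // leaving the redundant move A <- A.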
  InstructionOperand* source = move->source();
  if (source->Equals(destination)) {
    move->Eliminate();
    return;
  }

  // This move may be blocked on at most one pending move, in which case we
  // have a cycle.  Search for such a blocking move and perform a swap to
  // resolve it.
  op_iterator blocker =
      std::find_if(moves->begin(), moves->end(),
                   [destination](MoveOperands& other) {
                     return other.Blocks(destination);
                   });
  if (blocker == moves->end()) {
    // The easy case: This move is not blocked.
    assembler_->AssembleMove(source, destination);
    move->Eliminate();
    return;
  }

  DCHECK(blocker->IsPending());
  // Ensure source is a register or both are stack slots, to limit swap cases.
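  // After this normalization, the first operand is a register unless both
  // operands are stack slots, so AssembleSwap has fewer operand combinations
  // to handle.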
  if (source->IsStackSlot() || source->IsDoubleStackSlot()) {
    std::swap(source, destination);
  }
  assembler_->AssembleSwap(source, destination);
  move->Eliminate();

  // Any unperformed (including pending) move with a source of either this
  // move's source or destination needs to have its source changed to
  // reflect the state of affairs after the swap.
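  // For example, after swapping A and B, a remaining move C <- A must become
  // C <- B, because A's old value now lives in B.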
  for (op_iterator other = moves->begin(); other != moves->end(); ++other) {
    if (other->Blocks(source)) {
      other->set_source(destination);
    } else if (other->Blocks(destination)) {
      other->set_source(source);
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8