// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/hydrogen-bce.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {


// We try to "factor up" HBoundsCheck instructions towards the root of the
// dominator tree.
// For now we handle checks where the index is like "exp + int32value".
// If in the dominator tree we check "exp + v1" and later (dominated)
16// "exp + v2", if v2 <= v1 we can safely remove the second check, and if
17// v2 > v1 we can use v2 in the 1st check and again remove the second.
18// To do so we keep a dictionary of all checks where the key if the pair
19// "exp, length".
20// The class BoundsCheckKey represents this key.
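//
// Illustrative example (offsets chosen arbitrarily, not taken from real
// code): for checks that share the same base "i" and length "len" along one
// dominator path,
//
//   HBoundsCheck(i + 2, len)   // first check seen: covered range is [2, 2]
//   ...
//   HBoundsCheck(i + 7, len)   // 7 is outside [2, 2]: becomes the upper
//                              // check, covered range grows to [2, 7]
//   ...
//   HBoundsCheck(i + 5, len)   // 5 lies inside [2, 7]: removed outright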
class BoundsCheckKey : public ZoneObject {
 public:
  HValue* IndexBase() const { return index_base_; }
  HValue* Length() const { return length_; }

  uint32_t Hash() {
    return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
  }

  static BoundsCheckKey* Create(Zone* zone,
                                HBoundsCheck* check,
                                int32_t* offset) {
    if (!check->index()->representation().IsSmiOrInteger32()) return NULL;

    HValue* index_base = NULL;
    HConstant* constant = NULL;
    bool is_sub = false;

    if (check->index()->IsAdd()) {
      HAdd* index = HAdd::cast(check->index());
      if (index->left()->IsConstant()) {
        constant = HConstant::cast(index->left());
        index_base = index->right();
      } else if (index->right()->IsConstant()) {
        constant = HConstant::cast(index->right());
        index_base = index->left();
      }
    } else if (check->index()->IsSub()) {
      HSub* index = HSub::cast(check->index());
      is_sub = true;
      if (index->right()->IsConstant()) {
        constant = HConstant::cast(index->right());
        index_base = index->left();
      }
    } else if (check->index()->IsConstant()) {
      index_base = check->block()->graph()->GetConstant0();
      constant = HConstant::cast(check->index());
    }

    if (constant != NULL && constant->HasInteger32Value() &&
        constant->Integer32Value() != kMinInt) {
      *offset = is_sub ? - constant->Integer32Value()
                       : constant->Integer32Value();
    } else {
      *offset = 0;
      index_base = check->index();
    }

    return new(zone) BoundsCheckKey(index_base, check->length());
  }

 private:
  BoundsCheckKey(HValue* index_base, HValue* length)
      : index_base_(index_base),
        length_(length) { }

  HValue* index_base_;
  HValue* length_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckKey);
};


// Data about each HBoundsCheck that can be eliminated or moved.
// It is the "value" in the dictionary indexed by "base-index, length"
// (the key is BoundsCheckKey).
// We scan the code with a dominator tree traversal.
// While traversing the dominator tree we keep, for each key, a stack
// (implemented as a singly linked list) of "data" for the basic blocks that
// contain a relevant check with that key (the dictionary holds the head of
// the list).
// We also keep all the "data" created for a given basic block in a list, and
// use it to "clean up" the dictionary when backtracking in the dominator tree
// traversal.
// This way each dictionary entry always points directly to the check that
// dominates the code currently being examined.
// We also track the current "offset" of the index expression and use it to
// decide whether a check is already "covered" (so it can be removed) or not.
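//
// Sketch of the bookkeeping (hypothetical blocks, with B1 dominating B2 and
// both containing a check with the same key):
//
//   while visiting B2:  table[key] -> data(B2) --father--> data(B1)
//   after leaving B2:   table[key] -> data(B1)   (restored by PostProcessBlock)
//
// so a lookup in the dictionary always yields the data of the closest
// dominating block that contains a check with that key.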
class BoundsCheckBbData: public ZoneObject {
 public:
  BoundsCheckKey* Key() const { return key_; }
  int32_t LowerOffset() const { return lower_offset_; }
  int32_t UpperOffset() const { return upper_offset_; }
  HBasicBlock* BasicBlock() const { return basic_block_; }
  HBoundsCheck* LowerCheck() const { return lower_check_; }
  HBoundsCheck* UpperCheck() const { return upper_check_; }
  BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
  BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }

  bool OffsetIsCovered(int32_t offset) const {
    return offset >= LowerOffset() && offset <= UpperOffset();
  }

  bool HasSingleCheck() { return lower_check_ == upper_check_; }

  void UpdateUpperOffsets(HBoundsCheck* check, int32_t offset) {
    BoundsCheckBbData* data = FatherInDominatorTree();
    while (data != NULL && data->UpperCheck() == check) {
      DCHECK(data->upper_offset_ < offset);
      data->upper_offset_ = offset;
      data = data->FatherInDominatorTree();
    }
  }

  void UpdateLowerOffsets(HBoundsCheck* check, int32_t offset) {
    BoundsCheckBbData* data = FatherInDominatorTree();
    while (data != NULL && data->LowerCheck() == check) {
      DCHECK(data->lower_offset_ > offset);
      data->lower_offset_ = offset;
      data = data->FatherInDominatorTree();
    }
  }

  // The goal of this method is to modify either upper_offset_ or
  // lower_offset_ so that new_offset is also covered (the covered
  // range grows).
  //
  // The precondition is that new_check follows UpperCheck() and
  // LowerCheck() in the same basic block, and that new_offset is not
  // covered (otherwise we could simply remove new_check).
  //
  // If HasSingleCheck() is true then new_check is added as the "second check"
  // (either upper or lower; note that HasSingleCheck() becomes false).
  // Otherwise one of the current checks is modified so that it also covers
  // new_offset, and new_check is removed.
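  //
  // For instance (hypothetical offsets): with a single check covering [0, 0],
  // CoverCheck(new_check, 5) keeps new_check and makes it the upper check
  // (range [0, 5]); a further call with offset 8 instead tightens that upper
  // check to offset 8 and deletes the incoming check (range [0, 8]).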
  void CoverCheck(HBoundsCheck* new_check,
                  int32_t new_offset) {
    DCHECK(new_check->index()->representation().IsSmiOrInteger32());
    bool keep_new_check = false;

    if (new_offset > upper_offset_) {
      upper_offset_ = new_offset;
      if (HasSingleCheck()) {
        keep_new_check = true;
        upper_check_ = new_check;
      } else {
        TightenCheck(upper_check_, new_check, new_offset);
        UpdateUpperOffsets(upper_check_, upper_offset_);
      }
    } else if (new_offset < lower_offset_) {
      lower_offset_ = new_offset;
      if (HasSingleCheck()) {
        keep_new_check = true;
        lower_check_ = new_check;
      } else {
        TightenCheck(lower_check_, new_check, new_offset);
        UpdateLowerOffsets(lower_check_, lower_offset_);
      }
    } else {
      // Should never have called CoverCheck() in this case.
      UNREACHABLE();
    }

    if (!keep_new_check) {
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating check #%d after tightening\n",
                        new_check->id());
      }
      new_check->block()->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      new_check->DeleteAndReplaceWith(new_check->ActualValue());
    } else {
      HBoundsCheck* first_check = new_check == lower_check_ ? upper_check_
                                                            : lower_check_;
      if (FLAG_trace_bce) {
        base::OS::Print("Moving second check #%d after first check #%d\n",
                        new_check->id(), first_check->id());
      }
      // The length is guaranteed to be live at first_check.
      DCHECK(new_check->length() == first_check->length());
      HInstruction* old_position = new_check->next();
      new_check->Unlink();
      new_check->InsertAfter(first_check);
      MoveIndexIfNecessary(new_check->index(), new_check, old_position);
    }
  }

  BoundsCheckBbData(BoundsCheckKey* key,
                    int32_t lower_offset,
                    int32_t upper_offset,
                    HBasicBlock* bb,
                    HBoundsCheck* lower_check,
                    HBoundsCheck* upper_check,
                    BoundsCheckBbData* next_in_bb,
                    BoundsCheckBbData* father_in_dt)
      : key_(key),
        lower_offset_(lower_offset),
        upper_offset_(upper_offset),
        basic_block_(bb),
        lower_check_(lower_check),
        upper_check_(upper_check),
        next_in_bb_(next_in_bb),
        father_in_dt_(father_in_dt) { }

 private:
  BoundsCheckKey* key_;
  int32_t lower_offset_;
  int32_t upper_offset_;
  HBasicBlock* basic_block_;
  HBoundsCheck* lower_check_;
  HBoundsCheck* upper_check_;
  BoundsCheckBbData* next_in_bb_;
  BoundsCheckBbData* father_in_dt_;

  void MoveIndexIfNecessary(HValue* index_raw,
                            HBoundsCheck* insert_before,
                            HInstruction* end_of_scan_range) {
    // index_raw can be HAdd(index_base, offset), HSub(index_base, offset),
    // HConstant(offset) or index_base directly.
    // In the latter case, no need to move anything.
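    // For example (illustrative): if index_raw is HAdd(i, HConstant(4)) and
    // the HAdd or the HConstant(4) is defined between insert_before and
    // end_of_scan_range, the backwards scan below notices that and the
    // instruction is unlinked and re-inserted before the check, so the moved
    // check still finds its operands defined.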
    if (index_raw->IsAdd() || index_raw->IsSub()) {
      HArithmeticBinaryOperation* index =
          HArithmeticBinaryOperation::cast(index_raw);
      HValue* left_input = index->left();
      HValue* right_input = index->right();
      HValue* context = index->context();
      bool must_move_index = false;
      bool must_move_left_input = false;
      bool must_move_right_input = false;
      bool must_move_context = false;
      for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
        if (cursor == left_input) must_move_left_input = true;
        if (cursor == right_input) must_move_right_input = true;
        if (cursor == context) must_move_context = true;
        if (cursor == index) must_move_index = true;
        if (cursor->previous() == NULL) {
          cursor = cursor->block()->dominator()->end();
        } else {
          cursor = cursor->previous();
        }
      }
      if (must_move_index) {
        index->Unlink();
        index->InsertBefore(insert_before);
      }
      // The BCE algorithm only selects mergeable bounds checks that share
      // the same "index_base", so we'll only ever have to move constants.
      if (must_move_left_input) {
        HConstant::cast(left_input)->Unlink();
        HConstant::cast(left_input)->InsertBefore(index);
      }
      if (must_move_right_input) {
        HConstant::cast(right_input)->Unlink();
        HConstant::cast(right_input)->InsertBefore(index);
      }
      if (must_move_context) {
        // Contexts are always constants.
        HConstant::cast(context)->Unlink();
        HConstant::cast(context)->InsertBefore(index);
      }
    } else if (index_raw->IsConstant()) {
      HConstant* index = HConstant::cast(index_raw);
      bool must_move = false;
      for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
        if (cursor == index) must_move = true;
        if (cursor->previous() == NULL) {
          cursor = cursor->block()->dominator()->end();
        } else {
          cursor = cursor->previous();
        }
      }
      if (must_move) {
        index->Unlink();
        index->InsertBefore(insert_before);
      }
    }
  }

  void TightenCheck(HBoundsCheck* original_check,
                    HBoundsCheck* tighter_check,
                    int32_t new_offset) {
    DCHECK(original_check->length() == tighter_check->length());
    MoveIndexIfNecessary(tighter_check->index(), original_check, tighter_check);
    original_check->ReplaceAllUsesWith(original_check->index());
    original_check->SetOperandAt(0, tighter_check->index());
    if (FLAG_trace_bce) {
      base::OS::Print("Tightened check #%d with offset %d from #%d\n",
                      original_check->id(), new_offset, tighter_check->id());
    }
  }

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckBbData);
};


static bool BoundsCheckKeyMatch(void* key1, void* key2) {
  BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
  BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
  return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
}

BoundsCheckTable::BoundsCheckTable(Zone* zone)
    : CustomMatcherZoneHashMap(BoundsCheckKeyMatch,
                               ZoneHashMap::kDefaultHashMapCapacity,
                               ZoneAllocationPolicy(zone)) {}

BoundsCheckBbData** BoundsCheckTable::LookupOrInsert(BoundsCheckKey* key,
                                                     Zone* zone) {
  return reinterpret_cast<BoundsCheckBbData**>(
      &(CustomMatcherZoneHashMap::LookupOrInsert(key, key->Hash(),
                                                 ZoneAllocationPolicy(zone))
            ->value));
}


void BoundsCheckTable::Insert(BoundsCheckKey* key,
                              BoundsCheckBbData* data,
                              Zone* zone) {
  CustomMatcherZoneHashMap::LookupOrInsert(key, key->Hash(),
                                           ZoneAllocationPolicy(zone))
      ->value = data;
}


void BoundsCheckTable::Delete(BoundsCheckKey* key) {
  Remove(key, key->Hash());
}


class HBoundsCheckEliminationState {
 public:
  HBasicBlock* block_;
  BoundsCheckBbData* bb_data_list_;
  int index_;
};


// Eliminates checks in bb and recursively in the dominated blocks.
// Also replaces the results of check instructions with the original value, if
// the result is used. This is safe now, since we don't do code motion after
// this point. It enables better register allocation since the value produced
// by check instructions is really a copy of the original value.
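//
// Conceptually (a sketch only, not the actual implementation) the traversal
// below is equivalent to:
//
//   void Visit(HBasicBlock* bb) {
//     BoundsCheckBbData* data = PreProcessBlock(bb);
//     for (int i = 0; i < bb->dominated_blocks()->length(); ++i) {
//       Visit(bb->dominated_blocks()->at(i));
//     }
//     PostProcessBlock(bb, data);
//   }
//
// but uses an explicit stack to avoid deep native recursion on large graphs.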
void HBoundsCheckEliminationPhase::EliminateRedundantBoundsChecks(
    HBasicBlock* entry) {
  // Allocate the stack.
  HBoundsCheckEliminationState* stack =
    zone()->NewArray<HBoundsCheckEliminationState>(graph()->blocks()->length());

  // Explicitly push the entry block.
  stack[0].block_ = entry;
  stack[0].bb_data_list_ = PreProcessBlock(entry);
  stack[0].index_ = 0;
  int stack_depth = 1;

  // Implement depth-first traversal with a stack.
  while (stack_depth > 0) {
    int current = stack_depth - 1;
    HBoundsCheckEliminationState* state = &stack[current];
    const ZoneList<HBasicBlock*>* children = state->block_->dominated_blocks();

    if (state->index_ < children->length()) {
      // Recursively visit children blocks.
      HBasicBlock* child = children->at(state->index_++);
      int next = stack_depth++;
      stack[next].block_ = child;
      stack[next].bb_data_list_ = PreProcessBlock(child);
      stack[next].index_ = 0;
    } else {
      // Finished with all children; post process the block.
      PostProcessBlock(state->block_, state->bb_data_list_);
      stack_depth--;
    }
  }
}


BoundsCheckBbData* HBoundsCheckEliminationPhase::PreProcessBlock(
    HBasicBlock* bb) {
  BoundsCheckBbData* bb_data_list = NULL;

  for (HInstructionIterator it(bb); !it.Done(); it.Advance()) {
    HInstruction* i = it.Current();
    if (!i->IsBoundsCheck()) continue;

    HBoundsCheck* check = HBoundsCheck::cast(i);
    int32_t offset = 0;
    BoundsCheckKey* key =
        BoundsCheckKey::Create(zone(), check, &offset);
    if (key == NULL) continue;
    BoundsCheckBbData** data_p = table_.LookupOrInsert(key, zone());
    BoundsCheckBbData* data = *data_p;
    if (data == NULL) {
      bb_data_list = new(zone()) BoundsCheckBbData(key,
                                                   offset,
                                                   offset,
                                                   bb,
                                                   check,
                                                   check,
                                                   bb_data_list,
                                                   NULL);
      *data_p = bb_data_list;
      if (FLAG_trace_bce) {
        base::OS::Print("Fresh bounds check data for block #%d: [%d]\n",
                        bb->block_id(), offset);
      }
    } else if (data->OffsetIsCovered(offset)) {
      bb->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating bounds check #%d, offset %d is covered\n",
                        check->id(), offset);
      }
      check->DeleteAndReplaceWith(check->ActualValue());
    } else if (data->BasicBlock() == bb) {
      // TODO(jkummerow): I think the following logic would be preferable:
      // if (data->BasicBlock() == bb ||
      //     graph()->use_optimistic_licm() ||
      //     bb->IsLoopSuccessorDominator()) {
      //   data->CoverCheck(check, offset)
      // } else {
      //   /* add pristine BCBbData like in (data == NULL) case above */
      // }
      // Even better would be: distinguish between read-only dominator-imposed
      // knowledge and modifiable upper/lower checks.
      // What happens currently is that the first bounds check in a dominated
      // block will stay around while any further checks are hoisted out,
      // which doesn't make sense. Investigate/fix this in a future CL.
      data->CoverCheck(check, offset);
    } else if (graph()->use_optimistic_licm() ||
               bb->IsLoopSuccessorDominator()) {
      int32_t new_lower_offset = offset < data->LowerOffset()
          ? offset
          : data->LowerOffset();
      int32_t new_upper_offset = offset > data->UpperOffset()
          ? offset
          : data->UpperOffset();
      bb_data_list = new(zone()) BoundsCheckBbData(key,
                                                   new_lower_offset,
                                                   new_upper_offset,
                                                   bb,
                                                   data->LowerCheck(),
                                                   data->UpperCheck(),
                                                   bb_data_list,
                                                   data);
      if (FLAG_trace_bce) {
        base::OS::Print("Updated bounds check data for block #%d: [%d - %d]\n",
                        bb->block_id(), new_lower_offset, new_upper_offset);
      }
      table_.Insert(key, bb_data_list, zone());
    }
  }

  return bb_data_list;
}


void HBoundsCheckEliminationPhase::PostProcessBlock(
    HBasicBlock* block, BoundsCheckBbData* data) {
  while (data != NULL) {
    if (data->FatherInDominatorTree()) {
      table_.Insert(data->Key(), data->FatherInDominatorTree(), zone());
    } else {
      table_.Delete(data->Key());
    }
    data = data->NextInBasicBlock();
  }
}

}  // namespace internal
}  // namespace v8