1// Copyright 2009 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/global-handles.h"
6
7#include "src/api.h"
8#include "src/cancelable-task.h"
9#include "src/objects-inl.h"
10#include "src/v8.h"
11#include "src/vm-state-inl.h"
12
13namespace v8 {
14namespace internal {
15
16
17ObjectGroup::~ObjectGroup() {
18  if (info != NULL) info->Dispose();
19  delete[] objects;
20}
21
22
23ImplicitRefGroup::~ImplicitRefGroup() {
24  delete[] children;
25}
26
27
28class GlobalHandles::Node {
29 public:
30  // State transition diagram:
31  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
32  enum State {
33    FREE = 0,
34    NORMAL,      // Normal global handle.
35    WEAK,        // Flagged as weak but not yet finalized.
36    PENDING,     // Has been recognized as only reachable by weak handles.
    NEAR_DEATH,  // The callback has been informed that the handle is near death.
38    NUMBER_OF_NODE_STATES
39  };
40
41  // Maps handle location (slot) to the containing node.
42  static Node* FromLocation(Object** location) {
43    DCHECK(offsetof(Node, object_) == 0);
44    return reinterpret_cast<Node*>(location);
45  }
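  // Illustrative sketch (not code from this file): because object_ sits at
  // offset 0, the Object** slot handed out by location() has the same address
  // as the Node itself, so the two casts round-trip:
  //
  //   Node* node = ...;                         // some live node
  //   Object** slot = node->location();         // address of node->object_
  //   DCHECK_EQ(node, Node::FromLocation(slot));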
46
47  Node() {
48    DCHECK(offsetof(Node, class_id_) == Internals::kNodeClassIdOffset);
49    DCHECK(offsetof(Node, flags_) == Internals::kNodeFlagsOffset);
50    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
51                  Internals::kNodeStateMask);
52    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
53    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
54    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
55    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
56                  Internals::kNodeIsIndependentShift);
57    STATIC_ASSERT(static_cast<int>(IsActive::kShift) ==
58                  Internals::kNodeIsActiveShift);
59  }
60
61#ifdef ENABLE_HANDLE_ZAPPING
62  ~Node() {
63    // TODO(1428): if it's a weak handle we should have invoked its callback.
64    // Zap the values for eager trapping.
65    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
66    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
67    index_ = 0;
68    set_independent(false);
69    set_active(false);
70    set_in_new_space_list(false);
71    parameter_or_next_free_.next_free = NULL;
72    weak_callback_ = NULL;
73  }
74#endif
75
76  void Initialize(int index, Node** first_free) {
77    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
78    index_ = static_cast<uint8_t>(index);
79    DCHECK(static_cast<int>(index_) == index);
80    set_state(FREE);
81    set_in_new_space_list(false);
82    parameter_or_next_free_.next_free = *first_free;
83    *first_free = this;
84  }
85
86  void Acquire(Object* object) {
87    DCHECK(state() == FREE);
88    object_ = object;
89    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
90    set_independent(false);
91    set_active(false);
92    set_state(NORMAL);
93    parameter_or_next_free_.parameter = NULL;
94    weak_callback_ = NULL;
95    IncreaseBlockUses();
96  }
97
98  void Zap() {
99    DCHECK(IsInUse());
100    // Zap the values for eager trapping.
101    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
102  }
103
104  void Release() {
105    DCHECK(IsInUse());
106    set_state(FREE);
107    // Zap the values for eager trapping.
108    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
109    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
110    set_independent(false);
111    set_active(false);
112    weak_callback_ = NULL;
113    DecreaseBlockUses();
114  }
115
116  // Object slot accessors.
117  Object* object() const { return object_; }
118  Object** location() { return &object_; }
119  Handle<Object> handle() { return Handle<Object>(location()); }
120
121  // Wrapper class ID accessors.
122  bool has_wrapper_class_id() const {
123    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
124  }
125
126  uint16_t wrapper_class_id() const { return class_id_; }
127
128  // State and flag accessors.
129
130  State state() const {
131    return NodeState::decode(flags_);
132  }
133  void set_state(State state) {
134    flags_ = NodeState::update(flags_, state);
135  }
136
137  bool is_independent() {
138    return IsIndependent::decode(flags_);
139  }
140  void set_independent(bool v) {
141    flags_ = IsIndependent::update(flags_, v);
142  }
143
144  bool is_active() {
145    return IsActive::decode(flags_);
146  }
147  void set_active(bool v) {
148    flags_ = IsActive::update(flags_, v);
149  }
150
151  bool is_in_new_space_list() {
152    return IsInNewSpaceList::decode(flags_);
153  }
154  void set_in_new_space_list(bool v) {
155    flags_ = IsInNewSpaceList::update(flags_, v);
156  }
157
158  WeaknessType weakness_type() const {
159    return NodeWeaknessType::decode(flags_);
160  }
161  void set_weakness_type(WeaknessType weakness_type) {
162    flags_ = NodeWeaknessType::update(flags_, weakness_type);
163  }
164
165  bool IsNearDeath() const {
166    // Check for PENDING to ensure correct answer when processing callbacks.
167    return state() == PENDING || state() == NEAR_DEATH;
168  }
169
170  bool IsWeak() const { return state() == WEAK; }
171
172  bool IsInUse() const { return state() != FREE; }
173
174  bool IsPendingPhantomCallback() const {
175    return state() == PENDING &&
176           (weakness_type() == PHANTOM_WEAK ||
177            weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
178  }
179
180  bool IsPendingPhantomResetHandle() const {
181    return state() == PENDING && weakness_type() == PHANTOM_WEAK_RESET_HANDLE;
182  }
183
184  bool IsRetainer() const {
185    return state() != FREE &&
186           !(state() == NEAR_DEATH && weakness_type() != FINALIZER_WEAK);
187  }
188
189  bool IsStrongRetainer() const { return state() == NORMAL; }
190
191  bool IsWeakRetainer() const {
192    return state() == WEAK || state() == PENDING ||
193           (state() == NEAR_DEATH && weakness_type() == FINALIZER_WEAK);
194  }
195
196  void MarkPending() {
197    DCHECK(state() == WEAK);
198    set_state(PENDING);
199  }
200
201  // Independent flag accessors.
202  void MarkIndependent() {
203    DCHECK(IsInUse());
204    set_independent(true);
205  }
206
207  // Callback accessor.
208  // TODO(svenpanne) Re-enable or nuke later.
209  // WeakReferenceCallback callback() { return callback_; }
210
211  // Callback parameter accessors.
212  void set_parameter(void* parameter) {
213    DCHECK(IsInUse());
214    parameter_or_next_free_.parameter = parameter;
215  }
216  void* parameter() const {
217    DCHECK(IsInUse());
218    return parameter_or_next_free_.parameter;
219  }
220
221  // Accessors for next free node in the free list.
222  Node* next_free() {
223    DCHECK(state() == FREE);
224    return parameter_or_next_free_.next_free;
225  }
226  void set_next_free(Node* value) {
227    DCHECK(state() == FREE);
228    parameter_or_next_free_.next_free = value;
229  }
230
231  void MakeWeak(void* parameter,
232                WeakCallbackInfo<void>::Callback phantom_callback,
233                v8::WeakCallbackType type) {
234    DCHECK(phantom_callback != nullptr);
235    DCHECK(IsInUse());
236    CHECK_NE(object_, reinterpret_cast<Object*>(kGlobalHandleZapValue));
237    set_state(WEAK);
238    switch (type) {
239      case v8::WeakCallbackType::kParameter:
240        set_weakness_type(PHANTOM_WEAK);
241        break;
242      case v8::WeakCallbackType::kInternalFields:
243        set_weakness_type(PHANTOM_WEAK_2_INTERNAL_FIELDS);
244        break;
245      case v8::WeakCallbackType::kFinalizer:
246        set_weakness_type(FINALIZER_WEAK);
247        break;
248    }
249    set_parameter(parameter);
250    weak_callback_ = phantom_callback;
251  }
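  // Embedder-side sketch of how a node typically ends up here. This relies on
  // the public v8::Global / SetWeak API of this V8 version; the wrapper and
  // callback names below are hypothetical:
  //
  //   v8::Global<v8::Object> handle(isolate, object);
  //   handle.SetWeak(my_wrapper, MyFirstPassCallback,
  //                  v8::WeakCallbackType::kParameter);
  //
  // SetWeak() reaches GlobalHandles::MakeWeak(), which forwards to this
  // method: kParameter maps to PHANTOM_WEAK, kInternalFields to
  // PHANTOM_WEAK_2_INTERNAL_FIELDS, and kFinalizer to FINALIZER_WEAK.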
252
253  void MakeWeak(Object*** location_addr) {
254    DCHECK(IsInUse());
255    CHECK_NE(object_, reinterpret_cast<Object*>(kGlobalHandleZapValue));
256    set_state(WEAK);
257    set_weakness_type(PHANTOM_WEAK_RESET_HANDLE);
258    set_parameter(location_addr);
259    weak_callback_ = nullptr;
260  }
261
262  void* ClearWeakness() {
263    DCHECK(IsInUse());
264    void* p = parameter();
265    set_state(NORMAL);
266    set_parameter(NULL);
267    return p;
268  }
269
270  void CollectPhantomCallbackData(
271      Isolate* isolate,
272      List<PendingPhantomCallback>* pending_phantom_callbacks) {
273    DCHECK(weakness_type() == PHANTOM_WEAK ||
274           weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
275    DCHECK(state() == PENDING);
276    DCHECK(weak_callback_ != nullptr);
277
278    void* internal_fields[v8::kInternalFieldsInWeakCallback] = {nullptr,
279                                                                nullptr};
280    if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) {
281      auto jsobject = JSObject::cast(object());
282      int field_count = jsobject->GetInternalFieldCount();
283      for (int i = 0; i < v8::kInternalFieldsInWeakCallback; ++i) {
284        if (field_count == i) break;
285        auto field = jsobject->GetInternalField(i);
286        if (field->IsSmi()) internal_fields[i] = field;
287      }
288    }
289
290    // Zap with something dangerous.
291    *location() = reinterpret_cast<Object*>(0x6057ca11);
292
293    typedef v8::WeakCallbackInfo<void> Data;
294    auto callback = reinterpret_cast<Data::Callback>(weak_callback_);
295    pending_phantom_callbacks->Add(
296        PendingPhantomCallback(this, callback, parameter(), internal_fields));
297    DCHECK(IsInUse());
298    set_state(NEAR_DEATH);
299  }
300
301  void ResetPhantomHandle() {
302    DCHECK(weakness_type() == PHANTOM_WEAK_RESET_HANDLE);
303    DCHECK(state() == PENDING);
304    DCHECK(weak_callback_ == nullptr);
305    Object*** handle = reinterpret_cast<Object***>(parameter());
306    *handle = nullptr;
307    Release();
308  }
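  // Note: PHANTOM_WEAK_RESET_HANDLE nodes carry no callback. Their parameter
  // slot instead stores the address of the embedder's handle slot (see
  // MakeWeak(Object***) above), which is simply nulled out here before the
  // node is released.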
309
310  bool PostGarbageCollectionProcessing(Isolate* isolate) {
311    // Handles only weak handles (not phantom) that are dying.
312    if (state() != Node::PENDING) return false;
313    if (weak_callback_ == NULL) {
314      Release();
315      return false;
316    }
317    set_state(NEAR_DEATH);
318
319    // Check that we are not passing a finalized external string to
320    // the callback.
321    DCHECK(!object_->IsExternalOneByteString() ||
322           ExternalOneByteString::cast(object_)->resource() != NULL);
323    DCHECK(!object_->IsExternalTwoByteString() ||
324           ExternalTwoByteString::cast(object_)->resource() != NULL);
325    if (weakness_type() != FINALIZER_WEAK) {
326      return false;
327    }
328
329    // Leaving V8.
330    VMState<EXTERNAL> vmstate(isolate);
331    HandleScope handle_scope(isolate);
332    void* internal_fields[v8::kInternalFieldsInWeakCallback] = {nullptr,
333                                                                nullptr};
334    v8::WeakCallbackInfo<void> data(reinterpret_cast<v8::Isolate*>(isolate),
335                                    parameter(), internal_fields, nullptr);
336    weak_callback_(data);
337
    // In most cases, the absence of explicit cleanup or revival of the weak
    // handle here would lead to a memory leak.
340    CHECK(state() != NEAR_DEATH);
341    return true;
342  }
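  // Sketch of a FINALIZER_WEAK callback that satisfies the CHECK above. The
  // names are hypothetical and this is hedged against the public API of this
  // V8 version rather than anything defined in this file:
  //
  //   void MyFinalizer(const v8::WeakCallbackInfo<MyWrapper>& info) {
  //     MyWrapper* wrapper = info.GetParameter();
  //     wrapper->global_handle().Reset();  // dispose the handle; the node
  //                                        // leaves NEAR_DEATH before the
  //                                        // CHECK above runs
  //     delete wrapper;                    // release the embedder object
  //   }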
343
344  inline GlobalHandles* GetGlobalHandles();
345
346 private:
347  inline NodeBlock* FindBlock();
348  inline void IncreaseBlockUses();
349  inline void DecreaseBlockUses();
350
351  // Storage for object pointer.
352  // Placed first to avoid offset computation.
353  Object* object_;
354
  // The next word stores class_id, index, and the flag byte.
  // Note: the most aligned fields should go first.
357
358  // Wrapper class ID.
359  uint16_t class_id_;
360
361  // Index in the containing handle block.
362  uint8_t index_;
363
  // This stores three flags (independent, active, and in_new_space_list),
  // a State, and a WeaknessType.
366  class NodeState : public BitField<State, 0, 3> {};
367  class IsIndependent : public BitField<bool, 3, 1> {};
  // The following two fields are mutually exclusive.
369  class IsActive : public BitField<bool, 4, 1> {};
370  class IsInNewSpaceList : public BitField<bool, 5, 1> {};
371  class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};
372
373  uint8_t flags_;
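  // Layout implied by the BitField definitions above: bits 0-2 hold the
  // State, bit 3 the independent flag, bit 4 the active flag, bit 5 the
  // new-space-list flag, and bits 6-7 the WeaknessType. That is
  // 3 + 1 + 1 + 1 + 2 = 8 bits, exactly filling this uint8_t.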
374
375  // Handle specific callback - might be a weak reference in disguise.
376  WeakCallbackInfo<void>::Callback weak_callback_;
377
378  // Provided data for callback.  In FREE state, this is used for
379  // the free list link.
380  union {
381    void* parameter;
382    Node* next_free;
383  } parameter_or_next_free_;
384
385  DISALLOW_COPY_AND_ASSIGN(Node);
386};
387
388
389class GlobalHandles::NodeBlock {
390 public:
391  static const int kSize = 256;
392
393  explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
394      : next_(next),
395        used_nodes_(0),
396        next_used_(NULL),
397        prev_used_(NULL),
398        global_handles_(global_handles) {}
399
400  void PutNodesOnFreeList(Node** first_free) {
401    for (int i = kSize - 1; i >= 0; --i) {
402      nodes_[i].Initialize(i, first_free);
403    }
404  }
405
406  Node* node_at(int index) {
407    DCHECK(0 <= index && index < kSize);
408    return &nodes_[index];
409  }
410
411  void IncreaseUses() {
412    DCHECK(used_nodes_ < kSize);
413    if (used_nodes_++ == 0) {
414      NodeBlock* old_first = global_handles_->first_used_block_;
415      global_handles_->first_used_block_ = this;
416      next_used_ = old_first;
417      prev_used_ = NULL;
418      if (old_first == NULL) return;
419      old_first->prev_used_ = this;
420    }
421  }
422
423  void DecreaseUses() {
424    DCHECK(used_nodes_ > 0);
425    if (--used_nodes_ == 0) {
426      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
427      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
428      if (this == global_handles_->first_used_block_) {
429        global_handles_->first_used_block_ = next_used_;
430      }
431    }
432  }
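  // Note on the bookkeeping above: IncreaseUses() and DecreaseUses() maintain
  // an intrusive doubly linked list (next_used_ / prev_used_) of blocks that
  // contain at least one used node, headed by
  // GlobalHandles::first_used_block_. NodeIterator below walks only this
  // list, so completely free blocks are skipped during root iteration.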
433
434  GlobalHandles* global_handles() { return global_handles_; }
435
436  // Next block in the list of all blocks.
437  NodeBlock* next() const { return next_; }
438
439  // Next/previous block in the list of blocks with used nodes.
440  NodeBlock* next_used() const { return next_used_; }
441  NodeBlock* prev_used() const { return prev_used_; }
442
443 private:
444  Node nodes_[kSize];
445  NodeBlock* const next_;
446  int used_nodes_;
447  NodeBlock* next_used_;
448  NodeBlock* prev_used_;
449  GlobalHandles* global_handles_;
450};
451
452
453GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
454  return FindBlock()->global_handles();
455}
456
457
458GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
459  intptr_t ptr = reinterpret_cast<intptr_t>(this);
460  ptr = ptr - index_ * sizeof(Node);
461  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
462  DCHECK(block->node_at(index_) == this);
463  return block;
464}
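// Worked example for FindBlock() above (illustrative): nodes_ is the first
// member of NodeBlock, so &nodes_[0] and the NodeBlock share an address. For
// a node with index_ == 3, subtracting 3 * sizeof(Node) from the node's own
// address therefore lands on &nodes_[0], i.e. on the containing block, which
// the DCHECK verifies via node_at(index_).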
465
466
467void GlobalHandles::Node::IncreaseBlockUses() {
468  NodeBlock* node_block = FindBlock();
469  node_block->IncreaseUses();
470  GlobalHandles* global_handles = node_block->global_handles();
471  global_handles->isolate()->counters()->global_handles()->Increment();
472  global_handles->number_of_global_handles_++;
473}
474
475
476void GlobalHandles::Node::DecreaseBlockUses() {
477  NodeBlock* node_block = FindBlock();
478  GlobalHandles* global_handles = node_block->global_handles();
479  parameter_or_next_free_.next_free = global_handles->first_free_;
480  global_handles->first_free_ = this;
481  node_block->DecreaseUses();
482  global_handles->isolate()->counters()->global_handles()->Decrement();
483  global_handles->number_of_global_handles_--;
484}
485
486
487class GlobalHandles::NodeIterator {
488 public:
489  explicit NodeIterator(GlobalHandles* global_handles)
490      : block_(global_handles->first_used_block_),
491        index_(0) {}
492
493  bool done() const { return block_ == NULL; }
494
495  Node* node() const {
496    DCHECK(!done());
497    return block_->node_at(index_);
498  }
499
500  void Advance() {
501    DCHECK(!done());
502    if (++index_ < NodeBlock::kSize) return;
503    index_ = 0;
504    block_ = block_->next_used();
505  }
506
507 private:
508  NodeBlock* block_;
509  int index_;
510
511  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
512};
513
514class GlobalHandles::PendingPhantomCallbacksSecondPassTask
515    : public v8::internal::CancelableTask {
516 public:
517  // Takes ownership of the contents of pending_phantom_callbacks, leaving it in
518  // the same state it would be after a call to Clear().
519  PendingPhantomCallbacksSecondPassTask(
520      List<PendingPhantomCallback>* pending_phantom_callbacks, Isolate* isolate)
521      : CancelableTask(isolate) {
522    pending_phantom_callbacks_.Swap(pending_phantom_callbacks);
523  }
524
525  void RunInternal() override {
526    TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
527    isolate()->heap()->CallGCPrologueCallbacks(
528        GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
529    InvokeSecondPassPhantomCallbacks(&pending_phantom_callbacks_, isolate());
530    isolate()->heap()->CallGCEpilogueCallbacks(
531        GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
532  }
533
534 private:
535  List<PendingPhantomCallback> pending_phantom_callbacks_;
536
537  DISALLOW_COPY_AND_ASSIGN(PendingPhantomCallbacksSecondPassTask);
538};
539
540GlobalHandles::GlobalHandles(Isolate* isolate)
541    : isolate_(isolate),
542      number_of_global_handles_(0),
543      first_block_(NULL),
544      first_used_block_(NULL),
545      first_free_(NULL),
546      post_gc_processing_count_(0),
547      number_of_phantom_handle_resets_(0),
548      object_group_connections_(kObjectGroupConnectionsCapacity) {}
549
550GlobalHandles::~GlobalHandles() {
551  NodeBlock* block = first_block_;
552  while (block != NULL) {
553    NodeBlock* tmp = block->next();
554    delete block;
555    block = tmp;
556  }
557  first_block_ = NULL;
558}
559
560
561Handle<Object> GlobalHandles::Create(Object* value) {
562  if (first_free_ == NULL) {
563    first_block_ = new NodeBlock(this, first_block_);
564    first_block_->PutNodesOnFreeList(&first_free_);
565  }
566  DCHECK(first_free_ != NULL);
567  // Take the first node in the free list.
568  Node* result = first_free_;
569  first_free_ = result->next_free();
570  result->Acquire(value);
571  if (isolate_->heap()->InNewSpace(value) &&
572      !result->is_in_new_space_list()) {
573    new_space_nodes_.Add(result);
574    result->set_in_new_space_list(true);
575  }
576  return result->handle();
577}
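// Sketch of the embedder-visible path that ends in Create(). This is hedged
// against the public API of this V8 version rather than anything defined in
// this file: constructing a persistent handle such as
//
//   v8::Global<v8::Object> global(isolate, local_object);
//
// goes through V8::GlobalizeReference() in api.cc, which calls
// isolate->global_handles()->Create() and wraps the returned location.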
578
579
580Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
581  DCHECK(location != NULL);
582  return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
583}
584
585
586void GlobalHandles::Destroy(Object** location) {
587  if (location != NULL) Node::FromLocation(location)->Release();
588}
589
590
591typedef v8::WeakCallbackInfo<void>::Callback GenericCallback;
592
593
594void GlobalHandles::MakeWeak(Object** location, void* parameter,
595                             GenericCallback phantom_callback,
596                             v8::WeakCallbackType type) {
597  Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
598}
599
600void GlobalHandles::MakeWeak(Object*** location_addr) {
601  Node::FromLocation(*location_addr)->MakeWeak(location_addr);
602}
603
604void* GlobalHandles::ClearWeakness(Object** location) {
605  return Node::FromLocation(location)->ClearWeakness();
606}
607
608
609void GlobalHandles::MarkIndependent(Object** location) {
610  Node::FromLocation(location)->MarkIndependent();
611}
612
613bool GlobalHandles::IsIndependent(Object** location) {
614  return Node::FromLocation(location)->is_independent();
615}
616
617
618bool GlobalHandles::IsNearDeath(Object** location) {
619  return Node::FromLocation(location)->IsNearDeath();
620}
621
622
623bool GlobalHandles::IsWeak(Object** location) {
624  return Node::FromLocation(location)->IsWeak();
625}
626
627DISABLE_CFI_PERF
628void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
629  for (NodeIterator it(this); !it.done(); it.Advance()) {
630    Node* node = it.node();
631    if (node->IsWeakRetainer()) {
632      // Pending weak phantom handles die immediately. Everything else survives.
633      if (node->IsPendingPhantomResetHandle()) {
634        node->ResetPhantomHandle();
635        ++number_of_phantom_handle_resets_;
636      } else if (node->IsPendingPhantomCallback()) {
637        node->CollectPhantomCallbackData(isolate(),
638                                         &pending_phantom_callbacks_);
639      } else {
640        v->VisitPointer(node->location());
641      }
642    }
643  }
644}
645
646
647void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
648  for (NodeIterator it(this); !it.done(); it.Advance()) {
649    if (it.node()->IsWeak() && f(it.node()->location())) {
650      it.node()->MarkPending();
651    }
652  }
653}
654
655
656void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
657  for (int i = 0; i < new_space_nodes_.length(); ++i) {
658    Node* node = new_space_nodes_[i];
659    if (node->IsStrongRetainer() ||
660        (node->IsWeakRetainer() && !node->is_independent() &&
661         node->is_active())) {
662      v->VisitPointer(node->location());
663    }
664  }
665}
666
667
668void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
669    WeakSlotCallbackWithHeap f) {
670  for (int i = 0; i < new_space_nodes_.length(); ++i) {
671    Node* node = new_space_nodes_[i];
672    DCHECK(node->is_in_new_space_list());
673    if (node->is_independent() && node->IsWeak() &&
674        f(isolate_->heap(), node->location())) {
675      node->MarkPending();
676    }
677  }
678}
679
680
681void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
682  for (int i = 0; i < new_space_nodes_.length(); ++i) {
683    Node* node = new_space_nodes_[i];
684    DCHECK(node->is_in_new_space_list());
685    if (node->is_independent() && node->IsWeakRetainer()) {
686      // Pending weak phantom handles die immediately. Everything else survives.
687      if (node->IsPendingPhantomResetHandle()) {
688        node->ResetPhantomHandle();
689        ++number_of_phantom_handle_resets_;
690      } else if (node->IsPendingPhantomCallback()) {
691        node->CollectPhantomCallbackData(isolate(),
692                                         &pending_phantom_callbacks_);
693      } else {
694        v->VisitPointer(node->location());
695      }
696    }
697  }
698}
699
700
701void GlobalHandles::IdentifyWeakUnmodifiedObjects(
702    WeakSlotCallback is_unmodified) {
703  for (int i = 0; i < new_space_nodes_.length(); ++i) {
704    Node* node = new_space_nodes_[i];
705    if (node->IsWeak() && !is_unmodified(node->location())) {
706      node->set_active(true);
707    }
708  }
709}
710
711
712void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
713    WeakSlotCallbackWithHeap is_unscavenged) {
714  for (int i = 0; i < new_space_nodes_.length(); ++i) {
715    Node* node = new_space_nodes_[i];
716    DCHECK(node->is_in_new_space_list());
717    if ((node->is_independent() || !node->is_active()) && node->IsWeak() &&
718        is_unscavenged(isolate_->heap(), node->location())) {
719      node->MarkPending();
720    }
721  }
722}
723
724template <GlobalHandles::IterationMode mode>
725void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) {
726  for (int i = 0; i < new_space_nodes_.length(); ++i) {
727    Node* node = new_space_nodes_[i];
728    DCHECK(node->is_in_new_space_list());
729    if ((node->is_independent() || !node->is_active()) &&
730        node->IsWeakRetainer()) {
731      // Pending weak phantom handles die immediately. Everything else survives.
732      if (node->IsPendingPhantomResetHandle()) {
733        if (mode == IterationMode::HANDLE_PHANTOM_NODES ||
734            mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
735          node->ResetPhantomHandle();
736          ++number_of_phantom_handle_resets_;
737        }
738      } else if (node->IsPendingPhantomCallback()) {
739        if (mode == IterationMode::HANDLE_PHANTOM_NODES ||
740            mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
741          node->CollectPhantomCallbackData(isolate(),
742                                           &pending_phantom_callbacks_);
743        }
744      } else {
745        if (mode == IterationMode::VISIT_OTHERS ||
746            mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
747          v->VisitPointer(node->location());
748        }
749      }
750    }
751  }
752}
753
754template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
755    GlobalHandles::HANDLE_PHANTOM_NODES>(ObjectVisitor* v);
756
757template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
758    GlobalHandles::VISIT_OTHERS>(ObjectVisitor* v);
759
760template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
761    GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(ObjectVisitor* v);
762
763DISABLE_CFI_PERF
764bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
765                                        WeakSlotCallbackWithHeap can_skip) {
766  ComputeObjectGroupsAndImplicitReferences();
767  int last = 0;
768  bool any_group_was_visited = false;
769  for (int i = 0; i < object_groups_.length(); i++) {
770    ObjectGroup* entry = object_groups_.at(i);
771    DCHECK(entry != NULL);
772
773    Object*** objects = entry->objects;
774    bool group_should_be_visited = false;
775    for (size_t j = 0; j < entry->length; j++) {
776      Object* object = *objects[j];
777      if (object->IsHeapObject()) {
778        if (!can_skip(isolate_->heap(), &object)) {
779          group_should_be_visited = true;
780          break;
781        }
782      }
783    }
784
785    if (!group_should_be_visited) {
786      object_groups_[last++] = entry;
787      continue;
788    }
789
790    // An object in the group requires visiting, so iterate over all
791    // objects in the group.
792    for (size_t j = 0; j < entry->length; ++j) {
793      Object* object = *objects[j];
794      if (object->IsHeapObject()) {
795        v->VisitPointer(&object);
796        any_group_was_visited = true;
797      }
798    }
799
800    // Once the entire group has been iterated over, set the object
801    // group to NULL so it won't be processed again.
802    delete entry;
803    object_groups_.at(i) = NULL;
804  }
805  object_groups_.Rewind(last);
806  return any_group_was_visited;
807}
808
809namespace {
// Traces the information about object groups and implicit ref groups given by
// the embedder to V8 during each GC prologue.
812class ObjectGroupsTracer {
813 public:
814  explicit ObjectGroupsTracer(Isolate* isolate);
815  void Print();
816
817 private:
818  void PrintObjectGroup(ObjectGroup* group);
819  void PrintImplicitRefGroup(ImplicitRefGroup* group);
820  void PrintObject(Object* object);
821  void PrintConstructor(JSObject* js_object);
822  void PrintInternalFields(JSObject* js_object);
823  Isolate* isolate_;
824  DISALLOW_COPY_AND_ASSIGN(ObjectGroupsTracer);
825};
826
827ObjectGroupsTracer::ObjectGroupsTracer(Isolate* isolate) : isolate_(isolate) {}
828
829void ObjectGroupsTracer::Print() {
830  GlobalHandles* global_handles = isolate_->global_handles();
831
832  PrintIsolate(isolate_, "### Tracing object groups:\n");
833
834  for (auto group : *(global_handles->object_groups())) {
835    PrintObjectGroup(group);
836  }
837  for (auto group : *(global_handles->implicit_ref_groups())) {
838    PrintImplicitRefGroup(group);
839  }
840
841  PrintIsolate(isolate_, "### Tracing object groups finished.\n");
842}
843
844void ObjectGroupsTracer::PrintObject(Object* object) {
845  if (object->IsJSObject()) {
846    JSObject* js_object = JSObject::cast(object);
847
848    PrintF("{ constructor_name: ");
849    PrintConstructor(js_object);
850    PrintF(", hidden_fields: [ ");
851    PrintInternalFields(js_object);
852    PrintF(" ] }\n");
853  } else {
854    PrintF("object of unexpected type: %p\n", static_cast<void*>(object));
855  }
856}
857
858void ObjectGroupsTracer::PrintConstructor(JSObject* js_object) {
859  Object* maybe_constructor = js_object->map()->GetConstructor();
860  if (maybe_constructor->IsJSFunction()) {
861    JSFunction* constructor = JSFunction::cast(maybe_constructor);
862    String* name = String::cast(constructor->shared()->name());
863    if (name->length() == 0) name = constructor->shared()->inferred_name();
864
865    PrintF("%s", name->ToCString().get());
866  } else if (maybe_constructor->IsNull(isolate_)) {
867    if (js_object->IsOddball()) {
868      PrintF("<oddball>");
869    } else {
870      PrintF("<null>");
871    }
872  } else {
873    UNREACHABLE();
874  }
875}
876
877void ObjectGroupsTracer::PrintInternalFields(JSObject* js_object) {
878  for (int i = 0; i < js_object->GetInternalFieldCount(); ++i) {
879    if (i != 0) {
880      PrintF(", ");
881    }
882    PrintF("%p", static_cast<void*>(js_object->GetInternalField(i)));
883  }
884}
885
886void ObjectGroupsTracer::PrintObjectGroup(ObjectGroup* group) {
887  PrintIsolate(isolate_, "ObjectGroup (size: %" PRIuS ")\n", group->length);
888  Object*** objects = group->objects;
889
890  for (size_t i = 0; i < group->length; ++i) {
891    PrintIsolate(isolate_, "  - Member: ");
892    PrintObject(*objects[i]);
893  }
894}
895
896void ObjectGroupsTracer::PrintImplicitRefGroup(ImplicitRefGroup* group) {
897  PrintIsolate(isolate_, "ImplicitRefGroup (children count: %" PRIuS ")\n",
898               group->length);
899  PrintIsolate(isolate_, "  - Parent: ");
900  PrintObject(*(group->parent));
901
902  Object*** children = group->children;
903  for (size_t i = 0; i < group->length; ++i) {
904    PrintIsolate(isolate_, "  - Child: ");
905    PrintObject(*children[i]);
906  }
907}
908
909}  // namespace
910
911void GlobalHandles::PrintObjectGroups() {
912  ObjectGroupsTracer(isolate_).Print();
913}
914
915void GlobalHandles::InvokeSecondPassPhantomCallbacks(
916    List<PendingPhantomCallback>* callbacks, Isolate* isolate) {
917  while (callbacks->length() != 0) {
918    auto callback = callbacks->RemoveLast();
919    DCHECK(callback.node() == nullptr);
    // Fire the second pass callback.
921    callback.Invoke(isolate);
922  }
923}
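// The second-pass list consumed above is populated from inside first-pass
// callbacks, roughly like this (embedder-side sketch, hypothetical names):
//
//   void FirstPass(const v8::WeakCallbackInfo<MyWrapper>& info) {
//     info.GetParameter()->DropV8References();  // hypothetical helper
//     info.SetSecondPassCallback(SecondPass);   // queued for a later pass
//   }
//
// DispatchPendingPhantomCallbacks() below collects such callbacks and either
// runs them synchronously or posts a PendingPhantomCallbacksSecondPassTask.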
924
925
926int GlobalHandles::PostScavengeProcessing(
927    const int initial_post_gc_processing_count) {
928  int freed_nodes = 0;
929  for (int i = 0; i < new_space_nodes_.length(); ++i) {
930    Node* node = new_space_nodes_[i];
931    DCHECK(node->is_in_new_space_list());
932    if (!node->IsRetainer()) {
933      // Free nodes do not have weak callbacks. Do not use them to compute
934      // the freed_nodes.
935      continue;
936    }
    // Skip dependent or unmodified handles. Their weak callbacks might expect
    // to be called between two global garbage collection callbacks, which are
    // not called for minor collections.
    if (!node->is_independent() && node->is_active()) {
      node->set_active(false);
      continue;
    }
    node->set_active(false);
946
947    if (node->PostGarbageCollectionProcessing(isolate_)) {
948      if (initial_post_gc_processing_count != post_gc_processing_count_) {
949        // Weak callback triggered another GC and another round of
950        // PostGarbageCollection processing.  The current node might
951        // have been deleted in that round, so we need to bail out (or
952        // restart the processing).
953        return freed_nodes;
954      }
955    }
956    if (!node->IsRetainer()) {
957      freed_nodes++;
958    }
959  }
960  return freed_nodes;
961}
962
963
964int GlobalHandles::PostMarkSweepProcessing(
965    const int initial_post_gc_processing_count) {
966  int freed_nodes = 0;
967  for (NodeIterator it(this); !it.done(); it.Advance()) {
968    if (!it.node()->IsRetainer()) {
969      // Free nodes do not have weak callbacks. Do not use them to compute
970      // the freed_nodes.
971      continue;
972    }
973    it.node()->set_active(false);
974    if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
975      if (initial_post_gc_processing_count != post_gc_processing_count_) {
976        // See the comment above.
977        return freed_nodes;
978      }
979    }
980    if (!it.node()->IsRetainer()) {
981      freed_nodes++;
982    }
983  }
984  return freed_nodes;
985}
986
987
988void GlobalHandles::UpdateListOfNewSpaceNodes() {
989  int last = 0;
990  for (int i = 0; i < new_space_nodes_.length(); ++i) {
991    Node* node = new_space_nodes_[i];
992    DCHECK(node->is_in_new_space_list());
993    if (node->IsRetainer()) {
994      if (isolate_->heap()->InNewSpace(node->object())) {
995        new_space_nodes_[last++] = node;
996        isolate_->heap()->IncrementNodesCopiedInNewSpace();
997      } else {
998        node->set_in_new_space_list(false);
999        isolate_->heap()->IncrementNodesPromoted();
1000      }
1001    } else {
1002      node->set_in_new_space_list(false);
1003      isolate_->heap()->IncrementNodesDiedInNewSpace();
1004    }
1005  }
1006  new_space_nodes_.Rewind(last);
1007  new_space_nodes_.Trim();
1008}
1009
1010
1011int GlobalHandles::DispatchPendingPhantomCallbacks(
1012    bool synchronous_second_pass) {
1013  int freed_nodes = 0;
1014  List<PendingPhantomCallback> second_pass_callbacks;
1015  {
1016    // The initial pass callbacks must simply clear the nodes.
1017    for (auto i = pending_phantom_callbacks_.begin();
1018         i != pending_phantom_callbacks_.end(); ++i) {
1019      auto callback = i;
1020      // Skip callbacks that have already been processed once.
1021      if (callback->node() == nullptr) continue;
1022      callback->Invoke(isolate());
1023      if (callback->callback()) second_pass_callbacks.Add(*callback);
1024      freed_nodes++;
1025    }
1026  }
1027  pending_phantom_callbacks_.Clear();
1028  if (second_pass_callbacks.length() > 0) {
1029    if (FLAG_optimize_for_size || FLAG_predictable || synchronous_second_pass) {
1030      isolate()->heap()->CallGCPrologueCallbacks(
1031          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
1032      InvokeSecondPassPhantomCallbacks(&second_pass_callbacks, isolate());
1033      isolate()->heap()->CallGCEpilogueCallbacks(
1034          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
1035    } else {
1036      auto task = new PendingPhantomCallbacksSecondPassTask(
1037          &second_pass_callbacks, isolate());
1038      V8::GetCurrentPlatform()->CallOnForegroundThread(
1039          reinterpret_cast<v8::Isolate*>(isolate()), task);
1040    }
1041  }
1042  return freed_nodes;
1043}
1044
1045
1046void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate) {
1047  Data::Callback* callback_addr = nullptr;
1048  if (node_ != nullptr) {
1049    // Initialize for first pass callback.
1050    DCHECK(node_->state() == Node::NEAR_DEATH);
1051    callback_addr = &callback_;
1052  }
1053  Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
1054            internal_fields_, callback_addr);
1055  Data::Callback callback = callback_;
1056  callback_ = nullptr;
1057  callback(data);
1058  if (node_ != nullptr) {
1059    // Transition to second pass state.
1060    DCHECK(node_->state() == Node::FREE);
1061    node_ = nullptr;
1062  }
1063}
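// Note: callback_addr is only non-null during the first pass. That is what
// lets WeakCallbackInfo::SetSecondPassCallback() write a second-pass callback
// back into callback_ through the Data object; during the second pass the
// slot is null, so requesting yet another pass is not possible.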
1064
1065
1066int GlobalHandles::PostGarbageCollectionProcessing(
1067    GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
1068  // Process weak global handle callbacks. This must be done after the
1069  // GC is completely done, because the callbacks may invoke arbitrary
1070  // API functions.
1071  DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
1072  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
1073  int freed_nodes = 0;
1074  bool synchronous_second_pass =
1075      (gc_callback_flags &
1076       (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
1077        kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
1078  freed_nodes += DispatchPendingPhantomCallbacks(synchronous_second_pass);
1079  if (initial_post_gc_processing_count != post_gc_processing_count_) {
1080    // If the callbacks caused a nested GC, then return.  See comment in
1081    // PostScavengeProcessing.
1082    return freed_nodes;
1083  }
1084  if (Heap::IsYoungGenerationCollector(collector)) {
1085    freed_nodes += PostScavengeProcessing(initial_post_gc_processing_count);
1086  } else {
1087    freed_nodes += PostMarkSweepProcessing(initial_post_gc_processing_count);
1088  }
  if (initial_post_gc_processing_count != post_gc_processing_count_) {
    // If the callbacks caused a nested GC, then return.  See comment in
    // PostScavengeProcessing.
    return freed_nodes;
  }
  // No nested GC was triggered, so it is safe to update the new-space list.
  UpdateListOfNewSpaceNodes();
1097  return freed_nodes;
1098}
1099
1100
1101void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
1102  for (NodeIterator it(this); !it.done(); it.Advance()) {
1103    if (it.node()->IsStrongRetainer()) {
1104      v->VisitPointer(it.node()->location());
1105    }
1106  }
1107}
1108
1109
1110DISABLE_CFI_PERF
1111void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
1112  for (NodeIterator it(this); !it.done(); it.Advance()) {
1113    if (it.node()->IsRetainer()) {
1114      v->VisitPointer(it.node()->location());
1115    }
1116  }
1117}
1118
1119
1120DISABLE_CFI_PERF
1121void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
1122  for (NodeIterator it(this); !it.done(); it.Advance()) {
1123    if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
1124      v->VisitEmbedderReference(it.node()->location(),
1125                                it.node()->wrapper_class_id());
1126    }
1127  }
1128}
1129
1130
1131DISABLE_CFI_PERF
1132void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
1133  for (int i = 0; i < new_space_nodes_.length(); ++i) {
1134    Node* node = new_space_nodes_[i];
1135    if (node->IsRetainer() && node->has_wrapper_class_id()) {
1136      v->VisitEmbedderReference(node->location(),
1137                                node->wrapper_class_id());
1138    }
1139  }
1140}
1141
1142
1143DISABLE_CFI_PERF
1144void GlobalHandles::IterateWeakRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
1145  for (int i = 0; i < new_space_nodes_.length(); ++i) {
1146    Node* node = new_space_nodes_[i];
1147    if (node->has_wrapper_class_id() && node->IsWeak()) {
1148      v->VisitEmbedderReference(node->location(), node->wrapper_class_id());
1149    }
1150  }
1151}
1152
1153
1154int GlobalHandles::NumberOfWeakHandles() {
1155  int count = 0;
1156  for (NodeIterator it(this); !it.done(); it.Advance()) {
1157    if (it.node()->IsWeakRetainer()) {
1158      count++;
1159    }
1160  }
1161  return count;
1162}
1163
1164
1165int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
1166  int count = 0;
1167  for (NodeIterator it(this); !it.done(); it.Advance()) {
1168    if (it.node()->IsWeakRetainer() &&
1169        it.node()->object()->IsJSGlobalObject()) {
1170      count++;
1171    }
1172  }
1173  return count;
1174}
1175
1176
1177void GlobalHandles::RecordStats(HeapStats* stats) {
1178  *stats->global_handle_count = 0;
1179  *stats->weak_global_handle_count = 0;
1180  *stats->pending_global_handle_count = 0;
1181  *stats->near_death_global_handle_count = 0;
1182  *stats->free_global_handle_count = 0;
1183  for (NodeIterator it(this); !it.done(); it.Advance()) {
1184    *stats->global_handle_count += 1;
1185    if (it.node()->state() == Node::WEAK) {
1186      *stats->weak_global_handle_count += 1;
1187    } else if (it.node()->state() == Node::PENDING) {
1188      *stats->pending_global_handle_count += 1;
1189    } else if (it.node()->state() == Node::NEAR_DEATH) {
1190      *stats->near_death_global_handle_count += 1;
1191    } else if (it.node()->state() == Node::FREE) {
1192      *stats->free_global_handle_count += 1;
1193    }
1194  }
1195}
1196
1197#ifdef DEBUG
1198
1199void GlobalHandles::PrintStats() {
1200  int total = 0;
1201  int weak = 0;
1202  int pending = 0;
1203  int near_death = 0;
1204  int destroyed = 0;
1205
1206  for (NodeIterator it(this); !it.done(); it.Advance()) {
1207    total++;
1208    if (it.node()->state() == Node::WEAK) weak++;
1209    if (it.node()->state() == Node::PENDING) pending++;
1210    if (it.node()->state() == Node::NEAR_DEATH) near_death++;
1211    if (it.node()->state() == Node::FREE) destroyed++;
1212  }
1213
1214  PrintF("Global Handle Statistics:\n");
1215  PrintF("  allocated memory = %" PRIuS "B\n", total * sizeof(Node));
1216  PrintF("  # weak       = %d\n", weak);
1217  PrintF("  # pending    = %d\n", pending);
1218  PrintF("  # near_death = %d\n", near_death);
1219  PrintF("  # free       = %d\n", destroyed);
1220  PrintF("  # total      = %d\n", total);
1221}
1222
1223
1224void GlobalHandles::Print() {
1225  PrintF("Global handles:\n");
1226  for (NodeIterator it(this); !it.done(); it.Advance()) {
1227    PrintF("  handle %p to %p%s\n",
1228           reinterpret_cast<void*>(it.node()->location()),
1229           reinterpret_cast<void*>(it.node()->object()),
1230           it.node()->IsWeak() ? " (weak)" : "");
1231  }
1232}
1233
1234#endif
1235
1236
1237
1238void GlobalHandles::AddObjectGroup(Object*** handles,
1239                                   size_t length,
1240                                   v8::RetainedObjectInfo* info) {
1241#ifdef DEBUG
1242  for (size_t i = 0; i < length; ++i) {
1243    DCHECK(!Node::FromLocation(handles[i])->is_independent());
1244  }
1245#endif
1246  if (length == 0) {
1247    if (info != NULL) info->Dispose();
1248    return;
1249  }
1250  ObjectGroup* group = new ObjectGroup(length);
1251  for (size_t i = 0; i < length; ++i)
1252    group->objects[i] = handles[i];
1253  group->info = info;
1254  object_groups_.Add(group);
1255}
1256
1257
1258void GlobalHandles::SetObjectGroupId(Object** handle,
1259                                     UniqueId id) {
1260  object_group_connections_.Add(ObjectGroupConnection(id, handle));
1261}
1262
1263
1264void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
1265                                          RetainedObjectInfo* info) {
1266  retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
1267}
1268
1269
1270void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
1271  DCHECK(!Node::FromLocation(child)->is_independent());
1272  implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
1273}
1274
1275
1276void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
1277  DCHECK(!Node::FromLocation(child)->is_independent());
1278  ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
1279  group->children[0] = child;
1280  implicit_ref_groups_.Add(group);
1281}
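// Embedder-side sketch of how the object-group entry points above are
// normally reached. This is hedged against the public v8::Isolate API of this
// version; the persistent handles named below are hypothetical:
//
//   isolate->SetObjectGroupId(wrapper_a, v8::UniqueId(1));
//   isolate->SetObjectGroupId(wrapper_b, v8::UniqueId(1));
//   isolate->SetReferenceFromGroup(v8::UniqueId(1), child_handle);
//
// The connections recorded here are only turned into ObjectGroup and
// ImplicitRefGroup instances later, in
// ComputeObjectGroupsAndImplicitReferences().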
1282
1283
1284void GlobalHandles::RemoveObjectGroups() {
1285  for (int i = 0; i < object_groups_.length(); i++)
1286    delete object_groups_.at(i);
1287  object_groups_.Clear();
1288  for (int i = 0; i < retainer_infos_.length(); ++i)
1289    retainer_infos_[i].info->Dispose();
1290  retainer_infos_.Clear();
1291  object_group_connections_.Clear();
1292  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1293}
1294
1295
1296void GlobalHandles::RemoveImplicitRefGroups() {
1297  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
1298    delete implicit_ref_groups_.at(i);
1299  }
1300  implicit_ref_groups_.Clear();
1301  implicit_ref_connections_.Clear();
1302}
1303
1304
1305void GlobalHandles::TearDown() {
1306  // TODO(1428): invoke weak callbacks.
1307}
1308
1309
1310void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
1311  if (object_group_connections_.length() == 0) {
1312    for (int i = 0; i < retainer_infos_.length(); ++i)
1313      retainer_infos_[i].info->Dispose();
1314    retainer_infos_.Clear();
1315    implicit_ref_connections_.Clear();
1316    return;
1317  }
1318
1319  object_group_connections_.Sort();
1320  retainer_infos_.Sort();
1321  implicit_ref_connections_.Sort();
1322
1323  int info_index = 0;  // For iterating retainer_infos_.
1324  UniqueId current_group_id(0);
1325  int current_group_start = 0;
1326
1327  int current_implicit_refs_start = 0;
1328  int current_implicit_refs_end = 0;
1329  for (int i = 0; i <= object_group_connections_.length(); ++i) {
1330    if (i == 0)
1331      current_group_id = object_group_connections_[i].id;
1332    if (i == object_group_connections_.length() ||
1333        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i).
1335
      // Find out which implicit references are related to this group. (We want
      // to ignore object groups which only have one object, but that object is
      // still needed as a representative object for the implicit reference
      // group.)
1339      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
1340             implicit_ref_connections_[current_implicit_refs_start].id <
1341                 current_group_id)
1342        ++current_implicit_refs_start;
1343      current_implicit_refs_end = current_implicit_refs_start;
1344      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
1345             implicit_ref_connections_[current_implicit_refs_end].id ==
1346                 current_group_id)
1347        ++current_implicit_refs_end;
1348
1349      if (current_implicit_refs_end > current_implicit_refs_start) {
1350        // Find a representative object for the implicit references.
1351        HeapObject** representative = NULL;
1352        for (int j = current_group_start; j < i; ++j) {
1353          Object** object = object_group_connections_[j].object;
1354          if ((*object)->IsHeapObject()) {
1355            representative = reinterpret_cast<HeapObject**>(object);
1356            break;
1357          }
1358        }
1359        if (representative) {
1360          ImplicitRefGroup* group = new ImplicitRefGroup(
1361              representative,
1362              current_implicit_refs_end - current_implicit_refs_start);
1363          for (int j = current_implicit_refs_start;
1364               j < current_implicit_refs_end;
1365               ++j) {
1366            group->children[j - current_implicit_refs_start] =
1367                implicit_ref_connections_[j].object;
1368          }
1369          implicit_ref_groups_.Add(group);
1370        }
1371        current_implicit_refs_start = current_implicit_refs_end;
1372      }
1373
1374      // Find a RetainedObjectInfo for the group.
1375      RetainedObjectInfo* info = NULL;
1376      while (info_index < retainer_infos_.length() &&
1377             retainer_infos_[info_index].id < current_group_id) {
1378        retainer_infos_[info_index].info->Dispose();
1379        ++info_index;
1380      }
1381      if (info_index < retainer_infos_.length() &&
1382          retainer_infos_[info_index].id == current_group_id) {
1383        // This object group has an associated ObjectGroupRetainerInfo.
1384        info = retainer_infos_[info_index].info;
1385        ++info_index;
1386      }
1387
1388      // Ignore groups which only contain one object.
1389      if (i > current_group_start + 1) {
1390        ObjectGroup* group = new ObjectGroup(i - current_group_start);
1391        for (int j = current_group_start; j < i; ++j) {
1392          group->objects[j - current_group_start] =
1393              object_group_connections_[j].object;
1394        }
1395        group->info = info;
1396        object_groups_.Add(group);
1397      } else if (info) {
1398        info->Dispose();
1399      }
1400
1401      if (i < object_group_connections_.length()) {
1402        current_group_id = object_group_connections_[i].id;
1403        current_group_start = i;
1404      }
1405    }
1406  }
1407  object_group_connections_.Clear();
1408  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1409  retainer_infos_.Clear();
1410  implicit_ref_connections_.Clear();
1411}
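// Worked example (illustrative) for the grouping pass above: with the sorted
// connections { (id 1, o1), (id 1, o2), (id 2, o3) }, the run for id 1 spans
// two objects and becomes an ObjectGroup {o1, o2}, while the run for id 2
// holds a single object, so no group is emitted and any RetainedObjectInfo
// registered for id 2 is disposed instead.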
1412
1413
1414EternalHandles::EternalHandles() : size_(0) {
1415  for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
1416    singleton_handles_[i] = kInvalidIndex;
1417  }
1418}
1419
1420
1421EternalHandles::~EternalHandles() {
1422  for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
1423}
1424
1425
1426void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
1427  int limit = size_;
1428  for (int i = 0; i < blocks_.length(); i++) {
1429    DCHECK(limit > 0);
1430    Object** block = blocks_[i];
1431    visitor->VisitPointers(block, block + Min(limit, kSize));
1432    limit -= kSize;
1433  }
1434}
1435
1436
1437void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
1438  for (int i = 0; i < new_space_indices_.length(); i++) {
1439    visitor->VisitPointer(GetLocation(new_space_indices_[i]));
1440  }
1441}
1442
1443
1444void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
1445  int last = 0;
1446  for (int i = 0; i < new_space_indices_.length(); i++) {
1447    int index = new_space_indices_[i];
1448    if (heap->InNewSpace(*GetLocation(index))) {
1449      new_space_indices_[last++] = index;
1450    }
1451  }
1452  new_space_indices_.Rewind(last);
1453}
1454
1455
1456void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
1457  DCHECK_EQ(kInvalidIndex, *index);
1458  if (object == NULL) return;
1459  DCHECK_NE(isolate->heap()->the_hole_value(), object);
1460  int block = size_ >> kShift;
1461  int offset = size_ & kMask;
  // All blocks are full (or none exist yet), so allocate a new one.
1463  if (offset == 0) {
1464    Object** next_block = new Object*[kSize];
1465    Object* the_hole = isolate->heap()->the_hole_value();
1466    MemsetPointer(next_block, the_hole, kSize);
1467    blocks_.Add(next_block);
1468  }
1469  DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1470  blocks_[block][offset] = object;
1471  if (isolate->heap()->InNewSpace(object)) {
1472    new_space_indices_.Add(size_);
1473  }
1474  *index = size_++;
1475}
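// Worked example for Create() above (illustrative, assuming
// kSize == (1 << kShift) and kMask == kSize - 1, which the block/offset math
// requires): with kShift of 8, the handle with index 260 lives at
// blocks_[260 >> 8][260 & kMask], i.e. blocks_[1][4], and a new block is
// allocated whenever offset wraps back to 0.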
1476
1477
1478}  // namespace internal
1479}  // namespace v8
1480