global-handles.cc revision bcf72ee8e3b26f1d0726869c7ddb3921c68b09a8
// Copyright 2009 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/global-handles.h"

#include "src/api.h"
#include "src/v8.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {


ObjectGroup::~ObjectGroup() {
  if (info != NULL) info->Dispose();
  delete[] objects;
}


ImplicitRefGroup::~ImplicitRefGroup() {
  delete[] children;
}


class GlobalHandles::Node {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
  enum State {
    FREE = 0,
    NORMAL,      // Normal global handle.
    WEAK,        // Flagged as weak but not yet finalized.
    PENDING,     // Has been recognized as only reachable by weak handles.
    NEAR_DEATH,  // Callback has been informed the handle is near death.
    NUMBER_OF_NODE_STATES
  };

  // Maps handle location (slot) to the containing node.
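  // This works because object_ is the first field of Node (checked by the
  // DCHECK below), so a handle slot and its Node share one address.
  // Illustrative sketch (not part of the original source):
  //
  //   Object** location = ...;                    // a global handle slot
  //   Node* node = Node::FromLocation(location);  // the Node wrapping it
  //   DCHECK_EQ(node->location(), location);      // same address, two views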
  static Node* FromLocation(Object** location) {
    DCHECK(offsetof(Node, object_) == 0);
    return reinterpret_cast<Node*>(location);
  }

  Node() {
    DCHECK(offsetof(Node, class_id_) == Internals::kNodeClassIdOffset);
    DCHECK(offsetof(Node, flags_) == Internals::kNodeFlagsOffset);
    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
                  Internals::kNodeStateMask);
    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
                  Internals::kNodeIsIndependentShift);
    STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
                  Internals::kNodeIsPartiallyDependentShift);
    STATIC_ASSERT(static_cast<int>(IsActive::kShift) ==
                  Internals::kNodeIsActiveShift);
  }

#ifdef ENABLE_HANDLE_ZAPPING
  ~Node() {
    // TODO(1428): if it's a weak handle we should have invoked its callback.
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    index_ = 0;
    set_independent(false);
    if (FLAG_scavenge_reclaim_unmodified_objects) {
      set_active(false);
    } else {
      set_partially_dependent(false);
    }
    set_in_new_space_list(false);
    parameter_or_next_free_.next_free = NULL;
    weak_callback_ = NULL;
  }
#endif

  void Initialize(int index, Node** first_free) {
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    index_ = static_cast<uint8_t>(index);
    DCHECK(static_cast<int>(index_) == index);
    set_state(FREE);
    set_in_new_space_list(false);
    parameter_or_next_free_.next_free = *first_free;
    *first_free = this;
  }

  void Acquire(Object* object) {
    DCHECK(state() == FREE);
    object_ = object;
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    if (FLAG_scavenge_reclaim_unmodified_objects) {
      set_active(false);
    } else {
      set_partially_dependent(false);
    }
    set_state(NORMAL);
    parameter_or_next_free_.parameter = NULL;
    weak_callback_ = NULL;
    IncreaseBlockUses();
  }

  void Zap() {
    DCHECK(IsInUse());
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
  }

  void Release() {
    DCHECK(IsInUse());
    set_state(FREE);
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    if (FLAG_scavenge_reclaim_unmodified_objects) {
      set_active(false);
    } else {
      set_partially_dependent(false);
    }
    weak_callback_ = NULL;
    DecreaseBlockUses();
  }

  // Object slot accessors.
  Object* object() const { return object_; }
  Object** location() { return &object_; }
  Handle<Object> handle() { return Handle<Object>(location()); }

  // Wrapper class ID accessors.
  bool has_wrapper_class_id() const {
    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
  }

  uint16_t wrapper_class_id() const { return class_id_; }

  // State and flag accessors.

  State state() const {
    return NodeState::decode(flags_);
  }
  void set_state(State state) {
    flags_ = NodeState::update(flags_, state);
  }

  bool is_independent() {
    return IsIndependent::decode(flags_);
  }
  void set_independent(bool v) {
    flags_ = IsIndependent::update(flags_, v);
  }

  bool is_partially_dependent() {
    CHECK(!FLAG_scavenge_reclaim_unmodified_objects);
    return IsPartiallyDependent::decode(flags_);
  }
  void set_partially_dependent(bool v) {
    CHECK(!FLAG_scavenge_reclaim_unmodified_objects);
    flags_ = IsPartiallyDependent::update(flags_, v);
  }

  bool is_active() {
    CHECK(FLAG_scavenge_reclaim_unmodified_objects);
    return IsActive::decode(flags_);
  }
  void set_active(bool v) {
    CHECK(FLAG_scavenge_reclaim_unmodified_objects);
    flags_ = IsActive::update(flags_, v);
  }

  bool is_in_new_space_list() {
    return IsInNewSpaceList::decode(flags_);
  }
  void set_in_new_space_list(bool v) {
    flags_ = IsInNewSpaceList::update(flags_, v);
  }

  WeaknessType weakness_type() const {
    return NodeWeaknessType::decode(flags_);
  }
  void set_weakness_type(WeaknessType weakness_type) {
    flags_ = NodeWeaknessType::update(flags_, weakness_type);
  }

  bool IsNearDeath() const {
    // Check for PENDING to ensure correct answer when processing callbacks.
    return state() == PENDING || state() == NEAR_DEATH;
  }

  bool IsWeak() const { return state() == WEAK; }

  bool IsInUse() const { return state() != FREE; }

  bool IsPendingPhantomCallback() const {
    return state() == PENDING &&
           (weakness_type() == PHANTOM_WEAK ||
            weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
  }

  bool IsPendingPhantomResetHandle() const {
    return state() == PENDING && weakness_type() == PHANTOM_WEAK_RESET_HANDLE;
  }

  bool IsRetainer() const {
    return state() != FREE &&
           !(state() == NEAR_DEATH && weakness_type() != FINALIZER_WEAK);
  }

  bool IsStrongRetainer() const { return state() == NORMAL; }

  bool IsWeakRetainer() const {
    return state() == WEAK || state() == PENDING ||
           (state() == NEAR_DEATH && weakness_type() == FINALIZER_WEAK);
  }

  void MarkPending() {
    DCHECK(state() == WEAK);
    set_state(PENDING);
  }

  // Independent flag accessors.
  void MarkIndependent() {
    DCHECK(IsInUse());
    set_independent(true);
  }

  void MarkPartiallyDependent() {
    DCHECK(IsInUse());
    if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
      set_partially_dependent(true);
    }
  }
  void clear_partially_dependent() { set_partially_dependent(false); }

  // Callback accessor.
  // TODO(svenpanne) Re-enable or nuke later.
  // WeakReferenceCallback callback() { return callback_; }

  // Callback parameter accessors.
  void set_parameter(void* parameter) {
    DCHECK(IsInUse());
    parameter_or_next_free_.parameter = parameter;
  }
  void* parameter() const {
    DCHECK(IsInUse());
    return parameter_or_next_free_.parameter;
  }

  // Accessors for next free node in the free list.
  Node* next_free() {
    DCHECK(state() == FREE);
    return parameter_or_next_free_.next_free;
  }
  void set_next_free(Node* value) {
    DCHECK(state() == FREE);
    parameter_or_next_free_.next_free = value;
  }

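  // The MakeWeak() overloads below back the public weak-handle API. Rough
  // embedder-side sketch (hedged; Wrappable is a hypothetical embedder type):
  //
  //   void OnGone(const v8::WeakCallbackInfo<Wrappable>& info) {
  //     delete info.GetParameter();  // the JS object is already unreachable
  //   }
  //   persistent.SetWeak(wrappable, OnGone, v8::WeakCallbackType::kParameter);
  //
  // kParameter and kInternalFields map to the PHANTOM_WEAK* weakness types,
  // kFinalizer to FINALIZER_WEAK, as the switch below shows.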
  void MakeWeak(void* parameter,
                WeakCallbackInfo<void>::Callback phantom_callback,
                v8::WeakCallbackType type) {
    DCHECK(phantom_callback != nullptr);
    DCHECK(IsInUse());
    CHECK_NE(object_, reinterpret_cast<Object*>(kGlobalHandleZapValue));
    set_state(WEAK);
    switch (type) {
      case v8::WeakCallbackType::kParameter:
        set_weakness_type(PHANTOM_WEAK);
        break;
      case v8::WeakCallbackType::kInternalFields:
        set_weakness_type(PHANTOM_WEAK_2_INTERNAL_FIELDS);
        break;
      case v8::WeakCallbackType::kFinalizer:
        set_weakness_type(FINALIZER_WEAK);
        break;
    }
    set_parameter(parameter);
    weak_callback_ = phantom_callback;
  }

  void MakeWeak(Object*** location_addr) {
    DCHECK(IsInUse());
    CHECK_NE(object_, reinterpret_cast<Object*>(kGlobalHandleZapValue));
    set_state(WEAK);
    set_weakness_type(PHANTOM_WEAK_RESET_HANDLE);
    set_parameter(location_addr);
    weak_callback_ = nullptr;
  }

  void* ClearWeakness() {
    DCHECK(IsInUse());
    void* p = parameter();
    set_state(NORMAL);
    set_parameter(NULL);
    return p;
  }

  void CollectPhantomCallbackData(
      Isolate* isolate,
      List<PendingPhantomCallback>* pending_phantom_callbacks) {
    DCHECK(weakness_type() == PHANTOM_WEAK ||
           weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
    DCHECK(state() == PENDING);
    DCHECK(weak_callback_ != nullptr);

    void* internal_fields[v8::kInternalFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) {
      auto jsobject = JSObject::cast(object());
      int field_count = jsobject->GetInternalFieldCount();
      for (int i = 0; i < v8::kInternalFieldsInWeakCallback; ++i) {
        if (field_count == i) break;
        auto field = jsobject->GetInternalField(i);
        if (field->IsSmi()) internal_fields[i] = field;
      }
    }

    // Zap with something dangerous.
    *location() = reinterpret_cast<Object*>(0x6057ca11);

    typedef v8::WeakCallbackInfo<void> Data;
    auto callback = reinterpret_cast<Data::Callback>(weak_callback_);
    pending_phantom_callbacks->Add(
        PendingPhantomCallback(this, callback, parameter(), internal_fields));
    DCHECK(IsInUse());
    set_state(NEAR_DEATH);
  }

  void ResetPhantomHandle() {
    DCHECK(weakness_type() == PHANTOM_WEAK_RESET_HANDLE);
    DCHECK(state() == PENDING);
    DCHECK(weak_callback_ == nullptr);
    Object*** handle = reinterpret_cast<Object***>(parameter());
    *handle = nullptr;
    Release();
  }

  bool PostGarbageCollectionProcessing(Isolate* isolate) {
    // Handles only weak handles (not phantom) that are dying.
    if (state() != Node::PENDING) return false;
    if (weak_callback_ == NULL) {
      Release();
      return false;
    }
    set_state(NEAR_DEATH);

    // Check that we are not passing a finalized external string to
    // the callback.
    DCHECK(!object_->IsExternalOneByteString() ||
           ExternalOneByteString::cast(object_)->resource() != NULL);
    DCHECK(!object_->IsExternalTwoByteString() ||
           ExternalTwoByteString::cast(object_)->resource() != NULL);
    if (weakness_type() != FINALIZER_WEAK) {
      return false;
    }

    // Leaving V8.
    VMState<EXTERNAL> vmstate(isolate);
    HandleScope handle_scope(isolate);
    void* internal_fields[v8::kInternalFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    v8::WeakCallbackInfo<void> data(reinterpret_cast<v8::Isolate*>(isolate),
                                    parameter(), internal_fields, nullptr);
    weak_callback_(data);

    // Absence of explicit cleanup or revival of the weak handle would, in
    // most cases, lead to a memory leak.
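    // A conforming finalizer therefore resets (or re-establishes) the handle.
    // Hedged embedder-side sketch, where Wrapper is a hypothetical type that
    // owns the v8::Persistent:
    //
    //   void Finalizer(const v8::WeakCallbackInfo<Wrapper>& info) {
    //     info.GetParameter()->persistent.Reset();
    //   }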
    CHECK(state() != NEAR_DEATH);
    return true;
  }

  inline GlobalHandles* GetGlobalHandles();

 private:
  inline NodeBlock* FindBlock();
  inline void IncreaseBlockUses();
  inline void DecreaseBlockUses();

  // Storage for object pointer.
  // Placed first to avoid offset computation.
  Object* object_;

  // Next word stores class_id, index, state, and independent.
  // Note: the most aligned fields should go first.

  // Wrapper class ID.
  uint16_t class_id_;

  // Index in the containing handle block.
  uint8_t index_;

  // This stores the flags (independent, active/partially_dependent and
  // in_new_space_list), the State and the WeaknessType.
  class NodeState : public BitField<State, 0, 3> {};
  class IsIndependent : public BitField<bool, 3, 1> {};
  // The following two fields are mutually exclusive.
  class IsActive : public BitField<bool, 4, 1> {};
  class IsPartiallyDependent : public BitField<bool, 4, 1> {};
  class IsInNewSpaceList : public BitField<bool, 5, 1> {};
  class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};

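  // Resulting bit layout of flags_:
  //   bits 0-2  NodeState
  //   bit  3    IsIndependent
  //   bit  4    IsActive or IsPartiallyDependent (mutually exclusive)
  //   bit  5    IsInNewSpaceList
  //   bits 6-7  NodeWeaknessType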
  uint8_t flags_;

  // Handle-specific callback - might be a weak reference in disguise.
  WeakCallbackInfo<void>::Callback weak_callback_;

  // Provided data for callback.  In FREE state, this is used for
  // the free list link.
  union {
    void* parameter;
    Node* next_free;
  } parameter_or_next_free_;

  DISALLOW_COPY_AND_ASSIGN(Node);
};


class GlobalHandles::NodeBlock {
 public:
  static const int kSize = 256;

  explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
      : next_(next),
        used_nodes_(0),
        next_used_(NULL),
        prev_used_(NULL),
        global_handles_(global_handles) {}

  void PutNodesOnFreeList(Node** first_free) {
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }

  Node* node_at(int index) {
    DCHECK(0 <= index && index < kSize);
    return &nodes_[index];
  }

  void IncreaseUses() {
    DCHECK(used_nodes_ < kSize);
    if (used_nodes_++ == 0) {
      NodeBlock* old_first = global_handles_->first_used_block_;
      global_handles_->first_used_block_ = this;
      next_used_ = old_first;
      prev_used_ = NULL;
      if (old_first == NULL) return;
      old_first->prev_used_ = this;
    }
  }

  void DecreaseUses() {
    DCHECK(used_nodes_ > 0);
    if (--used_nodes_ == 0) {
      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
      if (this == global_handles_->first_used_block_) {
        global_handles_->first_used_block_ = next_used_;
      }
    }
  }

  GlobalHandles* global_handles() { return global_handles_; }

  // Next block in the list of all blocks.
  NodeBlock* next() const { return next_; }

  // Next/previous block in the list of blocks with used nodes.
  NodeBlock* next_used() const { return next_used_; }
  NodeBlock* prev_used() const { return prev_used_; }

 private:
  Node nodes_[kSize];
  NodeBlock* const next_;
  int used_nodes_;
  NodeBlock* next_used_;
  NodeBlock* prev_used_;
  GlobalHandles* global_handles_;
};


GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
  return FindBlock()->global_handles();
}


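// A Node carries no back pointer to its NodeBlock. Because nodes_ is the first
// field of NodeBlock, subtracting index_ * sizeof(Node) from a node's own
// address recovers the containing block, which is what FindBlock() does.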
GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
  intptr_t ptr = reinterpret_cast<intptr_t>(this);
  ptr = ptr - index_ * sizeof(Node);
  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
  DCHECK(block->node_at(index_) == this);
  return block;
}


void GlobalHandles::Node::IncreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  node_block->IncreaseUses();
  GlobalHandles* global_handles = node_block->global_handles();
  global_handles->isolate()->counters()->global_handles()->Increment();
  global_handles->number_of_global_handles_++;
}


void GlobalHandles::Node::DecreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  GlobalHandles* global_handles = node_block->global_handles();
  parameter_or_next_free_.next_free = global_handles->first_free_;
  global_handles->first_free_ = this;
  node_block->DecreaseUses();
  global_handles->isolate()->counters()->global_handles()->Decrement();
  global_handles->number_of_global_handles_--;
}


class GlobalHandles::NodeIterator {
 public:
  explicit NodeIterator(GlobalHandles* global_handles)
      : block_(global_handles->first_used_block_),
        index_(0) {}

  bool done() const { return block_ == NULL; }

  Node* node() const {
    DCHECK(!done());
    return block_->node_at(index_);
  }

  void Advance() {
    DCHECK(!done());
    if (++index_ < NodeBlock::kSize) return;
    index_ = 0;
    block_ = block_->next_used();
  }

 private:
  NodeBlock* block_;
  int index_;

  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
};

class GlobalHandles::PendingPhantomCallbacksSecondPassTask
    : public v8::internal::CancelableTask {
 public:
  // Takes ownership of the contents of pending_phantom_callbacks, leaving it in
  // the same state it would be after a call to Clear().
  PendingPhantomCallbacksSecondPassTask(
      List<PendingPhantomCallback>* pending_phantom_callbacks, Isolate* isolate)
      : CancelableTask(isolate) {
    pending_phantom_callbacks_.Swap(pending_phantom_callbacks);
  }

  void RunInternal() override {
    TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
    isolate()->heap()->CallGCPrologueCallbacks(
        GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
    InvokeSecondPassPhantomCallbacks(&pending_phantom_callbacks_, isolate());
    isolate()->heap()->CallGCEpilogueCallbacks(
        GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
  }

 private:
  List<PendingPhantomCallback> pending_phantom_callbacks_;

  DISALLOW_COPY_AND_ASSIGN(PendingPhantomCallbacksSecondPassTask);
};

GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(NULL),
      first_used_block_(NULL),
      first_free_(NULL),
      post_gc_processing_count_(0),
      number_of_phantom_handle_resets_(0),
      object_group_connections_(kObjectGroupConnectionsCapacity) {}

GlobalHandles::~GlobalHandles() {
  NodeBlock* block = first_block_;
  while (block != NULL) {
    NodeBlock* tmp = block->next();
    delete block;
    block = tmp;
  }
  first_block_ = NULL;
}


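// Embedder view (hedged sketch): constructing a v8::Global or v8::Persistent
// reaches Create() via V8::GlobalizeReference() in api.cc, and Reset() or the
// destructor reaches Destroy() below, e.g.
//
//   v8::Global<v8::Object> handle(isolate, local);  // allocates a Node
//   handle.Reset();                                 // releases it again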
Handle<Object> GlobalHandles::Create(Object* value) {
  if (first_free_ == NULL) {
    first_block_ = new NodeBlock(this, first_block_);
    first_block_->PutNodesOnFreeList(&first_free_);
  }
  DCHECK(first_free_ != NULL);
  // Take the first node in the free list.
  Node* result = first_free_;
  first_free_ = result->next_free();
  result->Acquire(value);
  if (isolate_->heap()->InNewSpace(value) &&
      !result->is_in_new_space_list()) {
    new_space_nodes_.Add(result);
    result->set_in_new_space_list(true);
  }
  return result->handle();
}


Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
  DCHECK(location != NULL);
  return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
}


void GlobalHandles::Destroy(Object** location) {
  if (location != NULL) Node::FromLocation(location)->Release();
}


typedef v8::WeakCallbackInfo<void>::Callback GenericCallback;


void GlobalHandles::MakeWeak(Object** location, void* parameter,
                             GenericCallback phantom_callback,
                             v8::WeakCallbackType type) {
  Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
}

void GlobalHandles::MakeWeak(Object*** location_addr) {
  Node::FromLocation(*location_addr)->MakeWeak(location_addr);
}

void* GlobalHandles::ClearWeakness(Object** location) {
  return Node::FromLocation(location)->ClearWeakness();
}


void GlobalHandles::MarkIndependent(Object** location) {
  Node::FromLocation(location)->MarkIndependent();
}


void GlobalHandles::MarkPartiallyDependent(Object** location) {
  Node::FromLocation(location)->MarkPartiallyDependent();
}


bool GlobalHandles::IsIndependent(Object** location) {
  return Node::FromLocation(location)->is_independent();
}


bool GlobalHandles::IsNearDeath(Object** location) {
  return Node::FromLocation(location)->IsNearDeath();
}


bool GlobalHandles::IsWeak(Object** location) {
  return Node::FromLocation(location)->IsWeak();
}

void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (node->IsWeakRetainer()) {
      // Pending weak phantom handles die immediately. Everything else survives.
      if (node->IsPendingPhantomResetHandle()) {
        node->ResetPhantomHandle();
        ++number_of_phantom_handle_resets_;
      } else if (node->IsPendingPhantomCallback()) {
        node->CollectPhantomCallbackData(isolate(),
                                         &pending_phantom_callbacks_);
      } else {
        v->VisitPointer(node->location());
      }
    }
  }
}


void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeak() && f(it.node()->location())) {
      it.node()->MarkPending();
    }
  }
}


void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    if (FLAG_scavenge_reclaim_unmodified_objects) {
      if (node->IsStrongRetainer() ||
          (node->IsWeakRetainer() && !node->is_independent() &&
           node->is_active())) {
        v->VisitPointer(node->location());
      }
    } else {
      if (node->IsStrongRetainer() ||
          (node->IsWeakRetainer() && !node->is_independent() &&
           !node->is_partially_dependent())) {
        v->VisitPointer(node->location());
      }
    }
  }
}


void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
    WeakSlotCallbackWithHeap f) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || node->is_partially_dependent()) &&
        node->IsWeak() && f(isolate_->heap(), node->location())) {
      node->MarkPending();
    }
  }
}


void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || node->is_partially_dependent()) &&
        node->IsWeakRetainer()) {
      // Pending weak phantom handles die immediately. Everything else survives.
      if (node->IsPendingPhantomResetHandle()) {
        node->ResetPhantomHandle();
        ++number_of_phantom_handle_resets_;
      } else if (node->IsPendingPhantomCallback()) {
        node->CollectPhantomCallbackData(isolate(),
                                         &pending_phantom_callbacks_);
      } else {
        v->VisitPointer(node->location());
      }
    }
  }
}


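// The next three functions implement the scavenger handling of new-space weak
// handles when FLAG_scavenge_reclaim_unmodified_objects is on: weak handles
// whose objects were modified by the embedder are marked active and treated
// as strong roots, while unmodified, unscavenged ones are marked pending. The
// is_independent()/is_partially_dependent() logic above is the legacy scheme
// used when the flag is off.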
void GlobalHandles::IdentifyWeakUnmodifiedObjects(
    WeakSlotCallback is_unmodified) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    if (node->IsWeak() && !is_unmodified(node->location())) {
      node->set_active(true);
    }
  }
}


void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
    WeakSlotCallbackWithHeap is_unscavenged) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || !node->is_active()) && node->IsWeak() &&
        is_unscavenged(isolate_->heap(), node->location())) {
      node->MarkPending();
    }
  }
}


void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || !node->is_active()) &&
        node->IsWeakRetainer()) {
      // Pending weak phantom handles die immediately. Everything else survives.
      if (node->IsPendingPhantomResetHandle()) {
        node->ResetPhantomHandle();
        ++number_of_phantom_handle_resets_;
      } else if (node->IsPendingPhantomCallback()) {
        node->CollectPhantomCallbackData(isolate(),
                                         &pending_phantom_callbacks_);
      } else {
        v->VisitPointer(node->location());
      }
    }
  }
}


bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
                                        WeakSlotCallbackWithHeap can_skip) {
  ComputeObjectGroupsAndImplicitReferences();
  int last = 0;
  bool any_group_was_visited = false;
  for (int i = 0; i < object_groups_.length(); i++) {
    ObjectGroup* entry = object_groups_.at(i);
    DCHECK(entry != NULL);

    Object*** objects = entry->objects;
    bool group_should_be_visited = false;
    for (size_t j = 0; j < entry->length; j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        if (!can_skip(isolate_->heap(), &object)) {
          group_should_be_visited = true;
          break;
        }
      }
    }

    if (!group_should_be_visited) {
      object_groups_[last++] = entry;
      continue;
    }

    // An object in the group requires visiting, so iterate over all
    // objects in the group.
    for (size_t j = 0; j < entry->length; ++j) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        v->VisitPointer(&object);
        any_group_was_visited = true;
      }
    }

    // Once the entire group has been iterated over, set the object
    // group to NULL so it won't be processed again.
    delete entry;
    object_groups_.at(i) = NULL;
  }
  object_groups_.Rewind(last);
  return any_group_was_visited;
}

namespace {
// Traces the information about object groups and implicit ref groups given by
// the embedder to V8 during each GC prologue.
class ObjectGroupsTracer {
 public:
  explicit ObjectGroupsTracer(Isolate* isolate);
  void Print();

 private:
  void PrintObjectGroup(ObjectGroup* group);
  void PrintImplicitRefGroup(ImplicitRefGroup* group);
  void PrintObject(Object* object);
  void PrintConstructor(JSObject* js_object);
  void PrintInternalFields(JSObject* js_object);
  Isolate* isolate_;
  DISALLOW_COPY_AND_ASSIGN(ObjectGroupsTracer);
};

ObjectGroupsTracer::ObjectGroupsTracer(Isolate* isolate) : isolate_(isolate) {}

void ObjectGroupsTracer::Print() {
  GlobalHandles* global_handles = isolate_->global_handles();

  PrintIsolate(isolate_, "### Tracing object groups:\n");

  for (auto group : *(global_handles->object_groups())) {
    PrintObjectGroup(group);
  }
  for (auto group : *(global_handles->implicit_ref_groups())) {
    PrintImplicitRefGroup(group);
  }

  PrintIsolate(isolate_, "### Tracing object groups finished.\n");
}

void ObjectGroupsTracer::PrintObject(Object* object) {
  if (object->IsJSObject()) {
    JSObject* js_object = JSObject::cast(object);

    PrintF("{ constructor_name: ");
    PrintConstructor(js_object);
    PrintF(", hidden_fields: [ ");
    PrintInternalFields(js_object);
    PrintF(" ] }\n");
  } else {
    PrintF("object of unexpected type: %p\n", object);
  }
}

void ObjectGroupsTracer::PrintConstructor(JSObject* js_object) {
  Object* maybe_constructor = js_object->map()->GetConstructor();
  if (maybe_constructor->IsJSFunction()) {
    JSFunction* constructor = JSFunction::cast(maybe_constructor);
    String* name = String::cast(constructor->shared()->name());
    if (name->length() == 0) name = constructor->shared()->inferred_name();

    PrintF("%s", name->ToCString().get());
  } else if (maybe_constructor->IsNull()) {
    if (js_object->IsOddball()) {
      PrintF("<oddball>");
    } else {
      PrintF("<null>");
    }
  } else {
    UNREACHABLE();
  }
}

void ObjectGroupsTracer::PrintInternalFields(JSObject* js_object) {
  for (int i = 0; i < js_object->GetInternalFieldCount(); ++i) {
    if (i != 0) {
      PrintF(", ");
    }
    PrintF("%p", js_object->GetInternalField(i));
  }
}

void ObjectGroupsTracer::PrintObjectGroup(ObjectGroup* group) {
  PrintIsolate(isolate_, "ObjectGroup (size: %" PRIuS ")\n", group->length);
  Object*** objects = group->objects;

  for (size_t i = 0; i < group->length; ++i) {
    PrintIsolate(isolate_, "  - Member: ");
    PrintObject(*objects[i]);
  }
}

void ObjectGroupsTracer::PrintImplicitRefGroup(ImplicitRefGroup* group) {
  PrintIsolate(isolate_, "ImplicitRefGroup (children count: %" PRIuS ")\n",
               group->length);
  PrintIsolate(isolate_, "  - Parent: ");
  PrintObject(*(group->parent));

  Object*** children = group->children;
  for (size_t i = 0; i < group->length; ++i) {
    PrintIsolate(isolate_, "  - Child: ");
    PrintObject(*children[i]);
  }
}

}  // namespace

void GlobalHandles::PrintObjectGroups() {
  ObjectGroupsTracer(isolate_).Print();
}

void GlobalHandles::InvokeSecondPassPhantomCallbacks(
    List<PendingPhantomCallback>* callbacks, Isolate* isolate) {
  while (callbacks->length() != 0) {
    auto callback = callbacks->RemoveLast();
    DCHECK(callback.node() == nullptr);
    // Fire the second pass callback.
    callback.Invoke(isolate);
  }
}


int GlobalHandles::PostScavengeProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if (!node->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    // Skip dependent or unmodified handles. Their weak callbacks might expect
    // to be called between two global garbage collection callbacks which are
    // not called for minor collections.
    if (FLAG_scavenge_reclaim_unmodified_objects) {
      if (!node->is_independent() && (node->is_active())) {
        node->set_active(false);
        continue;
      }
      node->set_active(false);
    } else {
      if (!node->is_independent() && !node->is_partially_dependent()) {
        continue;
      }
      node->clear_partially_dependent();
    }

    if (node->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // Weak callback triggered another GC and another round of
        // PostGarbageCollection processing.  The current node might
        // have been deleted in that round, so we need to bail out (or
        // restart the processing).
        return freed_nodes;
      }
    }
    if (!node->IsRetainer()) {
      freed_nodes++;
    }
  }
  return freed_nodes;
}


int GlobalHandles::PostMarkSweepProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (!it.node()->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    if (FLAG_scavenge_reclaim_unmodified_objects) {
      it.node()->set_active(false);
    } else {
      it.node()->clear_partially_dependent();
    }
    if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // See the comment above.
        return freed_nodes;
      }
    }
    if (!it.node()->IsRetainer()) {
      freed_nodes++;
    }
  }
  return freed_nodes;
}


void GlobalHandles::UpdateListOfNewSpaceNodes() {
  int last = 0;
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if (node->IsRetainer()) {
      if (isolate_->heap()->InNewSpace(node->object())) {
        new_space_nodes_[last++] = node;
        isolate_->heap()->IncrementNodesCopiedInNewSpace();
      } else {
        node->set_in_new_space_list(false);
        isolate_->heap()->IncrementNodesPromoted();
      }
    } else {
      node->set_in_new_space_list(false);
      isolate_->heap()->IncrementNodesDiedInNewSpace();
    }
  }
  new_space_nodes_.Rewind(last);
  new_space_nodes_.Trim();
}


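// Phantom callbacks run in two passes: the first pass is invoked below while
// the nodes are being freed and may request a second pass via
// v8::WeakCallbackInfo::SetSecondPassCallback(); the second pass then runs
// either synchronously or from PendingPhantomCallbacksSecondPassTask. Rough
// embedder-side sketch (hedged; the callback names are hypothetical):
//
//   void SecondPass(const v8::WeakCallbackInfo<void>& info) { /* cleanup */ }
//   void FirstPass(const v8::WeakCallbackInfo<void>& info) {
//     info.SetSecondPassCallback(SecondPass);
//   }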
int GlobalHandles::DispatchPendingPhantomCallbacks(
    bool synchronous_second_pass) {
  int freed_nodes = 0;
  List<PendingPhantomCallback> second_pass_callbacks;
  {
    // The initial pass callbacks must simply clear the nodes.
    for (auto i = pending_phantom_callbacks_.begin();
         i != pending_phantom_callbacks_.end(); ++i) {
      auto callback = i;
      // Skip callbacks that have already been processed once.
      if (callback->node() == nullptr) continue;
      callback->Invoke(isolate());
      if (callback->callback()) second_pass_callbacks.Add(*callback);
      freed_nodes++;
    }
  }
  pending_phantom_callbacks_.Clear();
  if (second_pass_callbacks.length() > 0) {
    if (FLAG_optimize_for_size || FLAG_predictable || synchronous_second_pass) {
      isolate()->heap()->CallGCPrologueCallbacks(
          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
      InvokeSecondPassPhantomCallbacks(&second_pass_callbacks, isolate());
      isolate()->heap()->CallGCEpilogueCallbacks(
          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
    } else {
      auto task = new PendingPhantomCallbacksSecondPassTask(
          &second_pass_callbacks, isolate());
      V8::GetCurrentPlatform()->CallOnForegroundThread(
          reinterpret_cast<v8::Isolate*>(isolate()), task);
    }
  }
  return freed_nodes;
}


void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate) {
  Data::Callback* callback_addr = nullptr;
  if (node_ != nullptr) {
    // Initialize for first pass callback.
    DCHECK(node_->state() == Node::NEAR_DEATH);
    callback_addr = &callback_;
  }
  Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
            internal_fields_, callback_addr);
  Data::Callback callback = callback_;
  callback_ = nullptr;
  callback(data);
  if (node_ != nullptr) {
    // Transition to second pass state.
    DCHECK(node_->state() == Node::FREE);
    node_ = nullptr;
  }
}


int GlobalHandles::PostGarbageCollectionProcessing(
    GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
  // Process weak global handle callbacks. This must be done after the
  // GC is completely done, because the callbacks may invoke arbitrary
  // API functions.
  DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
  int freed_nodes = 0;
  bool synchronous_second_pass =
      (gc_callback_flags &
       (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
        kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
  freed_nodes += DispatchPendingPhantomCallbacks(synchronous_second_pass);
  if (initial_post_gc_processing_count != post_gc_processing_count_) {
    // If the callbacks caused a nested GC, then return.  See comment in
    // PostScavengeProcessing.
    return freed_nodes;
  }
  if (collector == SCAVENGER) {
    freed_nodes += PostScavengeProcessing(initial_post_gc_processing_count);
  } else {
    freed_nodes += PostMarkSweepProcessing(initial_post_gc_processing_count);
  }
  if (initial_post_gc_processing_count != post_gc_processing_count_) {
    // If the callbacks caused a nested GC, then return.  See comment in
    // PostScavengeProcessing.
    return freed_nodes;
  }
  if (initial_post_gc_processing_count == post_gc_processing_count_) {
    UpdateListOfNewSpaceNodes();
  }
  return freed_nodes;
}


void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsStrongRetainer()) {
      v->VisitPointer(it.node()->location());
    }
  }
}


void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsRetainer()) {
      v->VisitPointer(it.node()->location());
    }
  }
}


void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
      v->VisitEmbedderReference(it.node()->location(),
                                it.node()->wrapper_class_id());
    }
  }
}


void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    if (node->IsRetainer() && node->has_wrapper_class_id()) {
      v->VisitEmbedderReference(node->location(),
                                node->wrapper_class_id());
    }
  }
}


void GlobalHandles::IterateWeakRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    if (node->has_wrapper_class_id() && node->IsWeak()) {
      v->VisitEmbedderReference(node->location(), node->wrapper_class_id());
    }
  }
}


int GlobalHandles::NumberOfWeakHandles() {
  int count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeakRetainer()) {
      count++;
    }
  }
  return count;
}


int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
  int count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeakRetainer() &&
        it.node()->object()->IsJSGlobalObject()) {
      count++;
    }
  }
  return count;
}


void GlobalHandles::RecordStats(HeapStats* stats) {
  *stats->global_handle_count = 0;
  *stats->weak_global_handle_count = 0;
  *stats->pending_global_handle_count = 0;
  *stats->near_death_global_handle_count = 0;
  *stats->free_global_handle_count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    *stats->global_handle_count += 1;
    if (it.node()->state() == Node::WEAK) {
      *stats->weak_global_handle_count += 1;
    } else if (it.node()->state() == Node::PENDING) {
      *stats->pending_global_handle_count += 1;
    } else if (it.node()->state() == Node::NEAR_DEATH) {
      *stats->near_death_global_handle_count += 1;
    } else if (it.node()->state() == Node::FREE) {
      *stats->free_global_handle_count += 1;
    }
  }
}

#ifdef DEBUG

void GlobalHandles::PrintStats() {
  int total = 0;
  int weak = 0;
  int pending = 0;
  int near_death = 0;
  int destroyed = 0;

  for (NodeIterator it(this); !it.done(); it.Advance()) {
    total++;
    if (it.node()->state() == Node::WEAK) weak++;
    if (it.node()->state() == Node::PENDING) pending++;
    if (it.node()->state() == Node::NEAR_DEATH) near_death++;
    if (it.node()->state() == Node::FREE) destroyed++;
  }

  PrintF("Global Handle Statistics:\n");
  PrintF("  allocated memory = %" PRIuS "B\n", total * sizeof(Node));
  PrintF("  # weak       = %d\n", weak);
  PrintF("  # pending    = %d\n", pending);
  PrintF("  # near_death = %d\n", near_death);
  PrintF("  # free       = %d\n", destroyed);
  PrintF("  # total      = %d\n", total);
}


void GlobalHandles::Print() {
  PrintF("Global handles:\n");
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    PrintF("  handle %p to %p%s\n",
           reinterpret_cast<void*>(it.node()->location()),
           reinterpret_cast<void*>(it.node()->object()),
           it.node()->IsWeak() ? " (weak)" : "");
  }
}

#endif



void GlobalHandles::AddObjectGroup(Object*** handles,
                                   size_t length,
                                   v8::RetainedObjectInfo* info) {
#ifdef DEBUG
  for (size_t i = 0; i < length; ++i) {
    DCHECK(!Node::FromLocation(handles[i])->is_independent());
  }
#endif
  if (length == 0) {
    if (info != NULL) info->Dispose();
    return;
  }
  ObjectGroup* group = new ObjectGroup(length);
  for (size_t i = 0; i < length; ++i)
    group->objects[i] = handles[i];
  group->info = info;
  object_groups_.Add(group);
}


void GlobalHandles::SetObjectGroupId(Object** handle,
                                     UniqueId id) {
  object_group_connections_.Add(ObjectGroupConnection(id, handle));
}


void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
                                          RetainedObjectInfo* info) {
  retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
}


void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
  DCHECK(!Node::FromLocation(child)->is_independent());
  implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
}


void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
  DCHECK(!Node::FromLocation(child)->is_independent());
  ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
  group->children[0] = child;
  implicit_ref_groups_.Add(group);
}


void GlobalHandles::RemoveObjectGroups() {
  for (int i = 0; i < object_groups_.length(); i++)
    delete object_groups_.at(i);
  object_groups_.Clear();
  for (int i = 0; i < retainer_infos_.length(); ++i)
    retainer_infos_[i].info->Dispose();
  retainer_infos_.Clear();
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
}


void GlobalHandles::RemoveImplicitRefGroups() {
  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
    delete implicit_ref_groups_.at(i);
  }
  implicit_ref_groups_.Clear();
  implicit_ref_connections_.Clear();
}


void GlobalHandles::TearDown() {
  // TODO(1428): invoke weak callbacks.
}


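// Builds object_groups_ and implicit_ref_groups_ from the flat connection
// lists recorded via SetObjectGroupId(), SetRetainedObjectInfo() and
// SetReferenceFromGroup(): connections are sorted by UniqueId, each run of
// equal ids with more than one object becomes an ObjectGroup, and implicit
// ref connections with the same id become an ImplicitRefGroup anchored at a
// representative heap object from that group.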
void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
  if (object_group_connections_.length() == 0) {
    for (int i = 0; i < retainer_infos_.length(); ++i)
      retainer_infos_[i].info->Dispose();
    retainer_infos_.Clear();
    implicit_ref_connections_.Clear();
    return;
  }

  object_group_connections_.Sort();
  retainer_infos_.Sort();
  implicit_ref_connections_.Sort();

  int info_index = 0;  // For iterating retainer_infos_.
  UniqueId current_group_id(0);
  int current_group_start = 0;

  int current_implicit_refs_start = 0;
  int current_implicit_refs_end = 0;
  for (int i = 0; i <= object_group_connections_.length(); ++i) {
    if (i == 0)
      current_group_id = object_group_connections_[i].id;
    if (i == object_group_connections_.length() ||
        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i).

      // Find out which implicit references are related to this group. (We want
      // to ignore object groups which only have 1 object, but that object is
      // needed as a representative object for the implicit reference group.)
      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_start].id <
                 current_group_id)
        ++current_implicit_refs_start;
      current_implicit_refs_end = current_implicit_refs_start;
      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_end].id ==
                 current_group_id)
        ++current_implicit_refs_end;

      if (current_implicit_refs_end > current_implicit_refs_start) {
        // Find a representative object for the implicit references.
        HeapObject** representative = NULL;
        for (int j = current_group_start; j < i; ++j) {
          Object** object = object_group_connections_[j].object;
          if ((*object)->IsHeapObject()) {
            representative = reinterpret_cast<HeapObject**>(object);
            break;
          }
        }
        if (representative) {
          ImplicitRefGroup* group = new ImplicitRefGroup(
              representative,
              current_implicit_refs_end - current_implicit_refs_start);
          for (int j = current_implicit_refs_start;
               j < current_implicit_refs_end;
               ++j) {
            group->children[j - current_implicit_refs_start] =
                implicit_ref_connections_[j].object;
          }
          implicit_ref_groups_.Add(group);
        }
        current_implicit_refs_start = current_implicit_refs_end;
      }

      // Find a RetainedObjectInfo for the group.
      RetainedObjectInfo* info = NULL;
      while (info_index < retainer_infos_.length() &&
             retainer_infos_[info_index].id < current_group_id) {
        retainer_infos_[info_index].info->Dispose();
        ++info_index;
      }
      if (info_index < retainer_infos_.length() &&
          retainer_infos_[info_index].id == current_group_id) {
        // This object group has an associated ObjectGroupRetainerInfo.
        info = retainer_infos_[info_index].info;
        ++info_index;
      }

      // Ignore groups which only contain one object.
      if (i > current_group_start + 1) {
        ObjectGroup* group = new ObjectGroup(i - current_group_start);
        for (int j = current_group_start; j < i; ++j) {
          group->objects[j - current_group_start] =
              object_group_connections_[j].object;
        }
        group->info = info;
        object_groups_.Add(group);
      } else if (info) {
        info->Dispose();
      }

      if (i < object_group_connections_.length()) {
        current_group_id = object_group_connections_[i].id;
        current_group_start = i;
      }
    }
  }
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
  retainer_infos_.Clear();
  implicit_ref_connections_.Clear();
}


EternalHandles::EternalHandles() : size_(0) {
  for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
    singleton_handles_[i] = kInvalidIndex;
  }
}


EternalHandles::~EternalHandles() {
  for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
}


void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
  int limit = size_;
  for (int i = 0; i < blocks_.length(); i++) {
    DCHECK(limit > 0);
    Object** block = blocks_[i];
    visitor->VisitPointers(block, block + Min(limit, kSize));
    limit -= kSize;
  }
}


void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
  for (int i = 0; i < new_space_indices_.length(); i++) {
    visitor->VisitPointer(GetLocation(new_space_indices_[i]));
  }
}


void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
  int last = 0;
  for (int i = 0; i < new_space_indices_.length(); i++) {
    int index = new_space_indices_[i];
    if (heap->InNewSpace(*GetLocation(index))) {
      new_space_indices_[last++] = index;
    }
  }
  new_space_indices_.Rewind(last);
}


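// Eternal handles are addressed by a dense integer index that is decomposed
// into block = index >> kShift and offset = index & kMask. Blocks of kSize
// slots are allocated lazily, pre-filled with the_hole, and indices of
// new-space objects are additionally tracked in new_space_indices_.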
void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
  DCHECK_EQ(kInvalidIndex, *index);
  if (object == NULL) return;
  DCHECK_NE(isolate->heap()->the_hole_value(), object);
  int block = size_ >> kShift;
  int offset = size_ & kMask;
  // Need to allocate a new block.
  if (offset == 0) {
    Object** next_block = new Object*[kSize];
    Object* the_hole = isolate->heap()->the_hole_value();
    MemsetPointer(next_block, the_hole, kSize);
    blocks_.Add(next_block);
  }
  DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
  blocks_[block][offset] = object;
  if (isolate->heap()->InNewSpace(object)) {
    new_space_indices_.Add(size_);
  }
  *index = size_++;
}


}  // namespace internal
}  // namespace v8
