// global-handles.cc revision 958fae7ec3f466955f8e5b50fa5b8d38b9e91675
1// Copyright 2009 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#include "src/api.h"
8#include "src/global-handles.h"
9
10#include "src/vm-state-inl.h"
11
12namespace v8 {
13namespace internal {
14
15
ObjectGroup::~ObjectGroup() {
  // Dispose of the embedder-supplied RetainedObjectInfo, if one was
  // attached, and free the heap-allocated array of object slots.
  if (info != NULL) info->Dispose();
  delete[] objects;
}
20
21
ImplicitRefGroup::~ImplicitRefGroup() {
  // Only the children array is owned by the group; the parent slot is not.
  delete[] children;
}
25
26
27class GlobalHandles::Node {
28 public:
29  // State transition diagram:
30  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
31  enum State {
32    FREE = 0,
33    NORMAL,      // Normal global handle.
34    WEAK,        // Flagged as weak but not yet finalized.
35    PENDING,     // Has been recognized as only reachable by weak handles.
36    NEAR_DEATH,  // Callback has informed the handle is near death.
37    NUMBER_OF_NODE_STATES
38  };
39
40  // Maps handle location (slot) to the containing node.
41  static Node* FromLocation(Object** location) {
42    DCHECK(OFFSET_OF(Node, object_) == 0);
43    return reinterpret_cast<Node*>(location);
44  }
45
46  Node() {
47    DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset);
48    DCHECK(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
49    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
50                  Internals::kNodeStateMask);
51    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
52    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
53    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
54    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
55                  Internals::kNodeIsIndependentShift);
56    STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
57                  Internals::kNodeIsPartiallyDependentShift);
58  }
59
60#ifdef ENABLE_HANDLE_ZAPPING
61  ~Node() {
62    // TODO(1428): if it's a weak handle we should have invoked its callback.
63    // Zap the values for eager trapping.
64    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
65    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
66    index_ = 0;
67    set_independent(false);
68    set_partially_dependent(false);
69    set_in_new_space_list(false);
70    parameter_or_next_free_.next_free = NULL;
71    weak_callback_ = NULL;
72  }
73#endif
74
75  void Initialize(int index, Node** first_free) {
76    index_ = static_cast<uint8_t>(index);
77    DCHECK(static_cast<int>(index_) == index);
78    set_state(FREE);
79    set_in_new_space_list(false);
80    parameter_or_next_free_.next_free = *first_free;
81    *first_free = this;
82  }
83
84  void Acquire(Object* object) {
85    DCHECK(state() == FREE);
86    object_ = object;
87    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
88    set_independent(false);
89    set_partially_dependent(false);
90    set_state(NORMAL);
91    parameter_or_next_free_.parameter = NULL;
92    weak_callback_ = NULL;
93    IncreaseBlockUses();
94  }
95
96  void Zap() {
97    DCHECK(IsInUse());
98    // Zap the values for eager trapping.
99    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
100  }
101
102  void Release() {
103    DCHECK(IsInUse());
104    set_state(FREE);
105    // Zap the values for eager trapping.
106    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
107    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
108    set_independent(false);
109    set_partially_dependent(false);
110    weak_callback_ = NULL;
111    DecreaseBlockUses();
112  }
113
114  // Object slot accessors.
115  Object* object() const { return object_; }
116  Object** location() { return &object_; }
117  Handle<Object> handle() { return Handle<Object>(location()); }
118
119  // Wrapper class ID accessors.
120  bool has_wrapper_class_id() const {
121    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
122  }
123
124  uint16_t wrapper_class_id() const { return class_id_; }
125
126  // State and flag accessors.
127
128  State state() const {
129    return NodeState::decode(flags_);
130  }
131  void set_state(State state) {
132    flags_ = NodeState::update(flags_, state);
133  }
134
135  bool is_independent() {
136    return IsIndependent::decode(flags_);
137  }
138  void set_independent(bool v) {
139    flags_ = IsIndependent::update(flags_, v);
140  }
141
142  bool is_partially_dependent() {
143    return IsPartiallyDependent::decode(flags_);
144  }
145  void set_partially_dependent(bool v) {
146    flags_ = IsPartiallyDependent::update(flags_, v);
147  }
148
149  bool is_in_new_space_list() {
150    return IsInNewSpaceList::decode(flags_);
151  }
152  void set_in_new_space_list(bool v) {
153    flags_ = IsInNewSpaceList::update(flags_, v);
154  }
155
156  WeaknessType weakness_type() const {
157    return NodeWeaknessType::decode(flags_);
158  }
159  void set_weakness_type(WeaknessType weakness_type) {
160    flags_ = NodeWeaknessType::update(flags_, weakness_type);
161  }
162
163  bool IsNearDeath() const {
164    // Check for PENDING to ensure correct answer when processing callbacks.
165    return state() == PENDING || state() == NEAR_DEATH;
166  }
167
168  bool IsWeak() const { return state() == WEAK; }
169
170  bool IsInUse() const { return state() != FREE; }
171
172  bool IsRetainer() const { return state() != FREE; }
173
174  bool IsStrongRetainer() const { return state() == NORMAL; }
175
176  bool IsWeakRetainer() const {
177    return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
178  }
179
180  void MarkPending() {
181    DCHECK(state() == WEAK);
182    set_state(PENDING);
183  }
184
185  // Independent flag accessors.
186  void MarkIndependent() {
187    DCHECK(IsInUse());
188    set_independent(true);
189  }
190
191  void MarkPartiallyDependent() {
192    DCHECK(IsInUse());
193    if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
194      set_partially_dependent(true);
195    }
196  }
197  void clear_partially_dependent() { set_partially_dependent(false); }
198
199  // Callback accessor.
200  // TODO(svenpanne) Re-enable or nuke later.
201  // WeakReferenceCallback callback() { return callback_; }
202
203  // Callback parameter accessors.
204  void set_parameter(void* parameter) {
205    DCHECK(IsInUse());
206    DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK);
207    parameter_or_next_free_.parameter = parameter;
208  }
209  void* parameter() const {
210    DCHECK(IsInUse());
211    return parameter_or_next_free_.parameter;
212  }
213
214  void set_internal_fields(int internal_field_index1,
215                           int internal_field_index2) {
216    DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
217    // These are stored in an int16_t.
218    DCHECK(internal_field_index1 < 1 << 16);
219    DCHECK(internal_field_index1 >= -(1 << 16));
220    DCHECK(internal_field_index2 < 1 << 16);
221    DCHECK(internal_field_index2 >= -(1 << 16));
222    parameter_or_next_free_.internal_field_indeces.internal_field1 =
223        static_cast<int16_t>(internal_field_index1);
224    parameter_or_next_free_.internal_field_indeces.internal_field2 =
225        static_cast<int16_t>(internal_field_index2);
226  }
227
228  int internal_field1() const {
229    DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
230    return parameter_or_next_free_.internal_field_indeces.internal_field1;
231  }
232
233  int internal_field2() const {
234    DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
235    return parameter_or_next_free_.internal_field_indeces.internal_field2;
236  }
237
238  // Accessors for next free node in the free list.
239  Node* next_free() {
240    DCHECK(state() == FREE);
241    return parameter_or_next_free_.next_free;
242  }
243  void set_next_free(Node* value) {
244    DCHECK(state() == FREE);
245    parameter_or_next_free_.next_free = value;
246  }
247
248  void MakeWeak(void* parameter, WeakCallback weak_callback) {
249    DCHECK(weak_callback != NULL);
250    DCHECK(IsInUse());
251    CHECK(object_ != NULL);
252    set_state(WEAK);
253    set_weakness_type(NORMAL_WEAK);
254    set_parameter(parameter);
255    weak_callback_ = weak_callback;
256  }
257
258  void MakePhantom(void* parameter,
259                   PhantomCallbackData<void>::Callback phantom_callback,
260                   int16_t internal_field_index1,
261                   int16_t internal_field_index2) {
262    DCHECK(phantom_callback != NULL);
263    DCHECK(IsInUse());
264    CHECK(object_ != NULL);
265    set_state(WEAK);
266    if (parameter == NULL) {
267      set_weakness_type(INTERNAL_FIELDS_WEAK);
268      set_internal_fields(internal_field_index1, internal_field_index2);
269    } else {
270      DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex);
271      DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex);
272      set_weakness_type(PHANTOM_WEAK);
273      set_parameter(parameter);
274    }
275    weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback);
276  }
277
278  void* ClearWeakness() {
279    DCHECK(IsInUse());
280    void* p = parameter();
281    set_state(NORMAL);
282    set_parameter(NULL);
283    return p;
284  }
285
286  void CollectPhantomCallbackData(
287      Isolate* isolate, List<PendingPhantomCallback>* pending_phantom_callbacks,
288      List<PendingInternalFieldsCallback>* pending_internal_fields_callbacks) {
289    if (state() != Node::PENDING) return;
290    bool do_release = true;
291    if (weak_callback_ != NULL) {
292      if (weakness_type() == NORMAL_WEAK) return;
293
294      v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate);
295
296      if (weakness_type() == PHANTOM_WEAK) {
297        // Phantom weak pointer case. Zap with harmless value.
298        DCHECK(*location() == Smi::FromInt(0));
299        typedef PhantomCallbackData<void> Data;
300
301        Data data(api_isolate, parameter());
302        Data::Callback callback =
303            reinterpret_cast<Data::Callback>(weak_callback_);
304
305        pending_phantom_callbacks->Add(
306            PendingPhantomCallback(this, data, callback));
307
308        // Postpone the release of the handle. The embedder can't use the
309        // handle (it's zapped), but it may be using the location, and we
310        // don't want to confuse things by reusing that.
311        do_release = false;
312      } else {
313        DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
314        typedef InternalFieldsCallbackData<void, void> Data;
315
316        // Phantom weak pointer case, passing internal fields instead of
317        // parameter. Don't use a handle here during GC, because it will
318        // create a handle pointing to a dying object, which can confuse
319        // the next GC.
320        JSObject* jsobject = reinterpret_cast<JSObject*>(object());
321        DCHECK(jsobject->IsJSObject());
322        Data data(api_isolate, jsobject->GetInternalField(internal_field1()),
323                  jsobject->GetInternalField(internal_field2()));
324        Data::Callback callback =
325            reinterpret_cast<Data::Callback>(weak_callback_);
326
327        // In the future, we want to delay the callback. In that case we will
328        // zap when we queue up, to stop the C++ side accessing the dead V8
329        // object, but we will call Release only after the callback (allowing
330        // the node to be reused).
331        pending_internal_fields_callbacks->Add(
332            PendingInternalFieldsCallback(data, callback));
333      }
334    }
335    // TODO(erikcorry): At the moment the callbacks are not postponed much,
336    // but if we really postpone them until after the mutator has run, we
337    // need to divide things up, so that an early callback clears the handle,
338    // while a later one destroys the objects involved, possibley triggering
339    // some work when decremented ref counts hit zero.
340    if (do_release) Release();
341  }
342
343  bool PostGarbageCollectionProcessing(Isolate* isolate) {
344    if (state() != Node::PENDING) return false;
345    if (weak_callback_ == NULL) {
346      Release();
347      return false;
348    }
349    set_state(NEAR_DEATH);
350
351    // Check that we are not passing a finalized external string to
352    // the callback.
353    DCHECK(!object_->IsExternalOneByteString() ||
354           ExternalOneByteString::cast(object_)->resource() != NULL);
355    DCHECK(!object_->IsExternalTwoByteString() ||
356           ExternalTwoByteString::cast(object_)->resource() != NULL);
357    // Leaving V8.
358    VMState<EXTERNAL> vmstate(isolate);
359    HandleScope handle_scope(isolate);
360    if (weakness_type() == PHANTOM_WEAK) return false;
361    DCHECK(weakness_type() == NORMAL_WEAK);
362    Object** object = location();
363    Handle<Object> handle(*object, isolate);
364    v8::WeakCallbackData<v8::Value, void> data(
365        reinterpret_cast<v8::Isolate*>(isolate), parameter(),
366        v8::Utils::ToLocal(handle));
367    set_parameter(NULL);
368    weak_callback_(data);
369
370    // Absence of explicit cleanup or revival of weak handle
371    // in most of the cases would lead to memory leak.
372    CHECK(state() != NEAR_DEATH);
373    return true;
374  }
375
376  inline GlobalHandles* GetGlobalHandles();
377
378 private:
379  inline NodeBlock* FindBlock();
380  inline void IncreaseBlockUses();
381  inline void DecreaseBlockUses();
382
383  // Storage for object pointer.
384  // Placed first to avoid offset computation.
385  Object* object_;
386
387  // Next word stores class_id, index, state, and independent.
388  // Note: the most aligned fields should go first.
389
390  // Wrapper class ID.
391  uint16_t class_id_;
392
393  // Index in the containing handle block.
394  uint8_t index_;
395
396  // This stores three flags (independent, partially_dependent and
397  // in_new_space_list) and a State.
398  class NodeState : public BitField<State, 0, 3> {};
399  class IsIndependent : public BitField<bool, 3, 1> {};
400  class IsPartiallyDependent : public BitField<bool, 4, 1> {};
401  class IsInNewSpaceList : public BitField<bool, 5, 1> {};
402  class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};
403
404  uint8_t flags_;
405
406  // Handle specific callback - might be a weak reference in disguise.
407  WeakCallback weak_callback_;
408
409  // Provided data for callback.  In FREE state, this is used for
410  // the free list link.
411  union {
412    void* parameter;
413    struct {
414      int16_t internal_field1;
415      int16_t internal_field2;
416    } internal_field_indeces;
417    Node* next_free;
418  } parameter_or_next_free_;
419
420  DISALLOW_COPY_AND_ASSIGN(Node);
421};
422
423
// A NodeBlock is a fixed-size array of Nodes. Blocks form two intrusive
// lists: the list of all blocks (next_) and a doubly-linked list of
// blocks that contain at least one used node (next_used_/prev_used_).
class GlobalHandles::NodeBlock {
 public:
  static const int kSize = 256;

  explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
      : next_(next),
        used_nodes_(0),
        next_used_(NULL),
        prev_used_(NULL),
        global_handles_(global_handles) {}

  // Pushes every node of this block onto the free list; iterating from
  // the highest index leaves the list ordered by ascending index.
  void PutNodesOnFreeList(Node** first_free) {
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }

  Node* node_at(int index) {
    DCHECK(0 <= index && index < kSize);
    return &nodes_[index];
  }

  // Called when a node in this block goes into use. On the 0 -> 1
  // transition the block is linked at the head of the used-block list.
  void IncreaseUses() {
    DCHECK(used_nodes_ < kSize);
    if (used_nodes_++ == 0) {
      NodeBlock* old_first = global_handles_->first_used_block_;
      global_handles_->first_used_block_ = this;
      next_used_ = old_first;
      prev_used_ = NULL;
      if (old_first == NULL) return;
      old_first->prev_used_ = this;
    }
  }

  // Called when a node in this block is freed. On the 1 -> 0 transition
  // the block is unlinked from the used-block list.
  void DecreaseUses() {
    DCHECK(used_nodes_ > 0);
    if (--used_nodes_ == 0) {
      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
      if (this == global_handles_->first_used_block_) {
        global_handles_->first_used_block_ = next_used_;
      }
    }
  }

  GlobalHandles* global_handles() { return global_handles_; }

  // Next block in the list of all blocks.
  NodeBlock* next() const { return next_; }

  // Next/previous block in the list of blocks with used nodes.
  NodeBlock* next_used() const { return next_used_; }
  NodeBlock* prev_used() const { return prev_used_; }

 private:
  // Must remain the first member: Node::FindBlock() computes the block
  // address from a node address assuming nodes_ is at offset 0.
  Node nodes_[kSize];
  NodeBlock* const next_;
  int used_nodes_;
  NodeBlock* next_used_;
  NodeBlock* prev_used_;
  GlobalHandles* global_handles_;
};
486
487
488GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
489  return FindBlock()->global_handles();
490}
491
492
493GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
494  intptr_t ptr = reinterpret_cast<intptr_t>(this);
495  ptr = ptr - index_ * sizeof(Node);
496  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
497  DCHECK(block->node_at(index_) == this);
498  return block;
499}
500
501
502void GlobalHandles::Node::IncreaseBlockUses() {
503  NodeBlock* node_block = FindBlock();
504  node_block->IncreaseUses();
505  GlobalHandles* global_handles = node_block->global_handles();
506  global_handles->isolate()->counters()->global_handles()->Increment();
507  global_handles->number_of_global_handles_++;
508}
509
510
511void GlobalHandles::Node::DecreaseBlockUses() {
512  NodeBlock* node_block = FindBlock();
513  GlobalHandles* global_handles = node_block->global_handles();
514  parameter_or_next_free_.next_free = global_handles->first_free_;
515  global_handles->first_free_ = this;
516  node_block->DecreaseUses();
517  global_handles->isolate()->counters()->global_handles()->Decrement();
518  global_handles->number_of_global_handles_--;
519}
520
521
522class GlobalHandles::NodeIterator {
523 public:
524  explicit NodeIterator(GlobalHandles* global_handles)
525      : block_(global_handles->first_used_block_),
526        index_(0) {}
527
528  bool done() const { return block_ == NULL; }
529
530  Node* node() const {
531    DCHECK(!done());
532    return block_->node_at(index_);
533  }
534
535  void Advance() {
536    DCHECK(!done());
537    if (++index_ < NodeBlock::kSize) return;
538    index_ = 0;
539    block_ = block_->next_used();
540  }
541
542 private:
543  NodeBlock* block_;
544  int index_;
545
546  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
547};
548
549
// Allocation of the first NodeBlock is deferred until the first handle
// is created (see Create()).
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(NULL),
      first_used_block_(NULL),
      first_free_(NULL),
      post_gc_processing_count_(0),
      object_group_connections_(kObjectGroupConnectionsCapacity) {}
558
559
560GlobalHandles::~GlobalHandles() {
561  NodeBlock* block = first_block_;
562  while (block != NULL) {
563    NodeBlock* tmp = block->next();
564    delete block;
565    block = tmp;
566  }
567  first_block_ = NULL;
568}
569
570
571Handle<Object> GlobalHandles::Create(Object* value) {
572  if (first_free_ == NULL) {
573    first_block_ = new NodeBlock(this, first_block_);
574    first_block_->PutNodesOnFreeList(&first_free_);
575  }
576  DCHECK(first_free_ != NULL);
577  // Take the first node in the free list.
578  Node* result = first_free_;
579  first_free_ = result->next_free();
580  result->Acquire(value);
581  if (isolate_->heap()->InNewSpace(value) &&
582      !result->is_in_new_space_list()) {
583    new_space_nodes_.Add(result);
584    result->set_in_new_space_list(true);
585  }
586  return result->handle();
587}
588
589
590Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
591  DCHECK(location != NULL);
592  return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
593}
594
595
596void GlobalHandles::Destroy(Object** location) {
597  if (location != NULL) Node::FromLocation(location)->Release();
598}
599
600
601void GlobalHandles::MakeWeak(Object** location, void* parameter,
602                             WeakCallback weak_callback) {
603  Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
604}
605
606
// Shorthand for the generic phantom weak callback signature.
typedef PhantomCallbackData<void>::Callback GenericCallback;
608
609
610void GlobalHandles::MakePhantom(
611    Object** location,
612    v8::InternalFieldsCallbackData<void, void>::Callback phantom_callback,
613    int16_t internal_field_index1, int16_t internal_field_index2) {
614  Node::FromLocation(location)
615      ->MakePhantom(NULL, reinterpret_cast<GenericCallback>(phantom_callback),
616                    internal_field_index1, internal_field_index2);
617}
618
619
620void GlobalHandles::MakePhantom(Object** location, void* parameter,
621                                GenericCallback phantom_callback) {
622  Node::FromLocation(location)->MakePhantom(parameter, phantom_callback,
623                                            v8::Object::kNoInternalFieldIndex,
624                                            v8::Object::kNoInternalFieldIndex);
625}
626
627
628void GlobalHandles::CollectPhantomCallbackData() {
629  for (NodeIterator it(this); !it.done(); it.Advance()) {
630    Node* node = it.node();
631    node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_,
632                                     &pending_internal_fields_callbacks_);
633  }
634}
635
636
637void* GlobalHandles::ClearWeakness(Object** location) {
638  return Node::FromLocation(location)->ClearWeakness();
639}
640
641
642void GlobalHandles::MarkIndependent(Object** location) {
643  Node::FromLocation(location)->MarkIndependent();
644}
645
646
647void GlobalHandles::MarkPartiallyDependent(Object** location) {
648  Node::FromLocation(location)->MarkPartiallyDependent();
649}
650
651
652bool GlobalHandles::IsIndependent(Object** location) {
653  return Node::FromLocation(location)->is_independent();
654}
655
656
657bool GlobalHandles::IsNearDeath(Object** location) {
658  return Node::FromLocation(location)->IsNearDeath();
659}
660
661
662bool GlobalHandles::IsWeak(Object** location) {
663  return Node::FromLocation(location)->IsWeak();
664}
665
666
// Visits all weak-retaining handles. Pending handles are handled per
// weakness type (zap, visit, or skip); surviving weak handles are always
// visited so their slots get updated if the object moved.
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (node->IsWeakRetainer()) {
      // Weakness type can be normal, phantom or internal fields.
      // For normal weakness we mark through the handle so that
      // the object and things reachable from it are available
      // to the callback.
      // In the case of phantom we can zap the object handle now
      // and we won't need it, so we don't need to mark through it.
      // In the internal fields case we will need the internal
      // fields, so we can't zap the handle, but we don't need to
      // mark through it, because it will die in this GC round.
      if (node->state() == Node::PENDING) {
        if (node->weakness_type() == PHANTOM_WEAK) {
          *(node->location()) = Smi::FromInt(0);
        } else if (node->weakness_type() == NORMAL_WEAK) {
          v->VisitPointer(node->location());
        } else {
          DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK);
        }
      } else {
        // Node is not pending, so that means the object survived.  We still
        // need to visit the pointer in case the object moved, eg. because of
        // compaction.
        v->VisitPointer(node->location());
      }
    }
  }
}
697
698
699void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
700  for (NodeIterator it(this); !it.done(); it.Advance()) {
701    if (it.node()->IsWeak() && f(it.node()->location())) {
702      it.node()->MarkPending();
703    }
704  }
705}
706
707
708void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
709  for (int i = 0; i < new_space_nodes_.length(); ++i) {
710    Node* node = new_space_nodes_[i];
711    if (node->IsStrongRetainer() ||
712        (node->IsWeakRetainer() && !node->is_independent() &&
713         !node->is_partially_dependent())) {
714        v->VisitPointer(node->location());
715    }
716  }
717}
718
719
720void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
721    WeakSlotCallbackWithHeap f) {
722  for (int i = 0; i < new_space_nodes_.length(); ++i) {
723    Node* node = new_space_nodes_[i];
724    DCHECK(node->is_in_new_space_list());
725    if ((node->is_independent() || node->is_partially_dependent()) &&
726        node->IsWeak() && f(isolate_->heap(), node->location())) {
727      node->MarkPending();
728    }
729  }
730}
731
732
// Visits independent/partially dependent weak-retaining new-space nodes,
// applying the same per-weakness-type treatment as IterateWeakRoots.
void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || node->is_partially_dependent()) &&
        node->IsWeakRetainer()) {
      if (node->weakness_type() == PHANTOM_WEAK) {
        // Phantom: the slot is never needed again, zap it now.
        *(node->location()) = Smi::FromInt(0);
      } else if (node->weakness_type() == NORMAL_WEAK) {
        v->VisitPointer(node->location());
      } else {
        DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK);
        // For this case we only need to trace if it's alive: The tracing of
        // something that is already alive is just to get the pointer updated
        // to the new location of the object).
        DCHECK(node->state() != Node::NEAR_DEATH);
        if (node->state() != Node::PENDING) {
          v->VisitPointer(node->location());
        }
      }
    }
  }
}
756
757
// Visits every object group that contains at least one object that
// cannot be skipped; fully-skippable groups are compacted to the front
// of object_groups_ and kept for a later round. Visited groups are
// deleted. Returns true if any group was visited.
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
                                        WeakSlotCallbackWithHeap can_skip) {
  ComputeObjectGroupsAndImplicitReferences();
  int last = 0;  // Insertion point for retained (skipped) groups.
  bool any_group_was_visited = false;
  for (int i = 0; i < object_groups_.length(); i++) {
    ObjectGroup* entry = object_groups_.at(i);
    DCHECK(entry != NULL);

    Object*** objects = entry->objects;
    bool group_should_be_visited = false;
    // One non-skippable member forces a visit of the whole group.
    for (size_t j = 0; j < entry->length; j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        if (!can_skip(isolate_->heap(), &object)) {
          group_should_be_visited = true;
          break;
        }
      }
    }

    if (!group_should_be_visited) {
      object_groups_[last++] = entry;
      continue;
    }

    // An object in the group requires visiting, so iterate over all
    // objects in the group.
    for (size_t j = 0; j < entry->length; ++j) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        v->VisitPointer(&object);
        any_group_was_visited = true;
      }
    }

    // Once the entire group has been iterated over, set the object
    // group to NULL so it won't be processed again.
    delete entry;
    object_groups_.at(i) = NULL;
  }
  object_groups_.Rewind(last);
  return any_group_was_visited;
}
802
803
// Runs weak callbacks for independent or partially dependent new-space
// handles after a scavenge. Returns the number of nodes freed; bails out
// early if a callback started a nested round of post-GC processing.
int GlobalHandles::PostScavengeProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if (!node->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    // Skip dependent handles. Their weak callbacks might expect to be
    // called between two global garbage collection callbacks which
    // are not called for minor collections.
    if (!node->is_independent() && !node->is_partially_dependent()) {
      continue;
    }
    node->clear_partially_dependent();
    if (node->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // Weak callback triggered another GC and another round of
        // PostGarbageCollection processing.  The current node might
        // have been deleted in that round, so we need to bail out (or
        // restart the processing).
        return freed_nodes;
      }
    }
    // The callback may have released the node (or revived it); only a
    // now-free node counts as freed.
    if (!node->IsRetainer()) {
      freed_nodes++;
    }
  }
  return freed_nodes;
}
837
838
// Runs weak callbacks for all retained handles after a full mark-sweep.
// Returns the number of nodes freed; bails out early if a callback
// started a nested round of post-GC processing.
int GlobalHandles::PostMarkSweepProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (!it.node()->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    it.node()->clear_partially_dependent();
    if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // See the comment above.
        return freed_nodes;
      }
    }
    // Count only nodes the callback actually released.
    if (!it.node()->IsRetainer()) {
      freed_nodes++;
    }
  }
  return freed_nodes;
}
861
862
// Compacts new_space_nodes_ in place: keeps only retained nodes whose
// objects are still in new space, and clears the in-new-space-list flag
// on nodes that were promoted or died.
void GlobalHandles::UpdateListOfNewSpaceNodes() {
  int last = 0;  // Insertion point for nodes that stay on the list.
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if (node->IsRetainer()) {
      if (isolate_->heap()->InNewSpace(node->object())) {
        new_space_nodes_[last++] = node;
        isolate_->heap()->IncrementNodesCopiedInNewSpace();
      } else {
        // Object was promoted to old space; stop tracking the node here.
        node->set_in_new_space_list(false);
        isolate_->heap()->IncrementNodesPromoted();
      }
    } else {
      node->set_in_new_space_list(false);
      isolate_->heap()->IncrementNodesDiedInNewSpace();
    }
  }
  new_space_nodes_.Rewind(last);
}
883
884
885int GlobalHandles::DispatchPendingPhantomCallbacks() {
886  int freed_nodes = 0;
887  while (pending_phantom_callbacks_.length() != 0) {
888    PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast();
889    callback.invoke();
890    freed_nodes++;
891  }
892  while (pending_internal_fields_callbacks_.length() != 0) {
893    PendingInternalFieldsCallback callback =
894        pending_internal_fields_callbacks_.RemoveLast();
895    callback.invoke();
896    freed_nodes++;
897  }
898  return freed_nodes;
899}
900
901
// Top-level post-GC driver: runs weak callbacks appropriate for the
// collector kind, then dispatches pending phantom callbacks and compacts
// the new-space node list, unless a nested GC intervened. Returns the
// total number of freed nodes.
int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) {
  // Process weak global handle callbacks. This must be done after the
  // GC is completely done, because the callbacks may invoke arbitrary
  // API functions.
  DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
  // The counter doubles as a nested-processing detector: any nested GC
  // bumps it again, which the helpers compare against.
  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
  int freed_nodes = 0;
  if (collector == SCAVENGER) {
    freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count);
  } else {
    freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count);
  }
  if (initial_post_gc_processing_count != post_gc_processing_count_) {
    // If the callbacks caused a nested GC, then return.  See comment in
    // PostScavengeProcessing.
    return freed_nodes;
  }
  freed_nodes += DispatchPendingPhantomCallbacks();
  if (initial_post_gc_processing_count == post_gc_processing_count_) {
    UpdateListOfNewSpaceNodes();
  }
  return freed_nodes;
}
925
926
void GlobalHandles::PendingPhantomCallback::invoke() {
  // The node may already have been released, e.g. by an earlier callback
  // in this dispatch batch.
  if (node_->state() == Node::FREE) return;
  DCHECK(node_->state() == Node::NEAR_DEATH);
  callback_(data_);
  // Release the node unless the callback itself already did so.
  if (node_->state() != Node::FREE) node_->Release();
}
933
934
935void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
936  for (NodeIterator it(this); !it.done(); it.Advance()) {
937    if (it.node()->IsStrongRetainer()) {
938      v->VisitPointer(it.node()->location());
939    }
940  }
941}
942
943
944void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
945  for (NodeIterator it(this); !it.done(); it.Advance()) {
946    if (it.node()->IsRetainer()) {
947      v->VisitPointer(it.node()->location());
948    }
949  }
950}
951
952
953void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
954  for (NodeIterator it(this); !it.done(); it.Advance()) {
955    if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
956      v->VisitEmbedderReference(it.node()->location(),
957                                it.node()->wrapper_class_id());
958    }
959  }
960}
961
962
963void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
964  for (int i = 0; i < new_space_nodes_.length(); ++i) {
965    Node* node = new_space_nodes_[i];
966    if (node->IsRetainer() && node->has_wrapper_class_id()) {
967      v->VisitEmbedderReference(node->location(),
968                                node->wrapper_class_id());
969    }
970  }
971}
972
973
974int GlobalHandles::NumberOfWeakHandles() {
975  int count = 0;
976  for (NodeIterator it(this); !it.done(); it.Advance()) {
977    if (it.node()->IsWeakRetainer()) {
978      count++;
979    }
980  }
981  return count;
982}
983
984
985int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
986  int count = 0;
987  for (NodeIterator it(this); !it.done(); it.Advance()) {
988    if (it.node()->IsWeakRetainer() &&
989        it.node()->object()->IsJSGlobalObject()) {
990      count++;
991    }
992  }
993  return count;
994}
995
996
997void GlobalHandles::RecordStats(HeapStats* stats) {
998  *stats->global_handle_count = 0;
999  *stats->weak_global_handle_count = 0;
1000  *stats->pending_global_handle_count = 0;
1001  *stats->near_death_global_handle_count = 0;
1002  *stats->free_global_handle_count = 0;
1003  for (NodeIterator it(this); !it.done(); it.Advance()) {
1004    *stats->global_handle_count += 1;
1005    if (it.node()->state() == Node::WEAK) {
1006      *stats->weak_global_handle_count += 1;
1007    } else if (it.node()->state() == Node::PENDING) {
1008      *stats->pending_global_handle_count += 1;
1009    } else if (it.node()->state() == Node::NEAR_DEATH) {
1010      *stats->near_death_global_handle_count += 1;
1011    } else if (it.node()->state() == Node::FREE) {
1012      *stats->free_global_handle_count += 1;
1013    }
1014  }
1015}
1016
1017#ifdef DEBUG
1018
1019void GlobalHandles::PrintStats() {
1020  int total = 0;
1021  int weak = 0;
1022  int pending = 0;
1023  int near_death = 0;
1024  int destroyed = 0;
1025
1026  for (NodeIterator it(this); !it.done(); it.Advance()) {
1027    total++;
1028    if (it.node()->state() == Node::WEAK) weak++;
1029    if (it.node()->state() == Node::PENDING) pending++;
1030    if (it.node()->state() == Node::NEAR_DEATH) near_death++;
1031    if (it.node()->state() == Node::FREE) destroyed++;
1032  }
1033
1034  PrintF("Global Handle Statistics:\n");
1035  PrintF("  allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
1036  PrintF("  # weak       = %d\n", weak);
1037  PrintF("  # pending    = %d\n", pending);
1038  PrintF("  # near_death = %d\n", near_death);
1039  PrintF("  # free       = %d\n", destroyed);
1040  PrintF("  # total      = %d\n", total);
1041}
1042
1043
1044void GlobalHandles::Print() {
1045  PrintF("Global handles:\n");
1046  for (NodeIterator it(this); !it.done(); it.Advance()) {
1047    PrintF("  handle %p to %p%s\n",
1048           reinterpret_cast<void*>(it.node()->location()),
1049           reinterpret_cast<void*>(it.node()->object()),
1050           it.node()->IsWeak() ? " (weak)" : "");
1051  }
1052}
1053
1054#endif
1055
1056
1057
1058void GlobalHandles::AddObjectGroup(Object*** handles,
1059                                   size_t length,
1060                                   v8::RetainedObjectInfo* info) {
1061#ifdef DEBUG
1062  for (size_t i = 0; i < length; ++i) {
1063    DCHECK(!Node::FromLocation(handles[i])->is_independent());
1064  }
1065#endif
1066  if (length == 0) {
1067    if (info != NULL) info->Dispose();
1068    return;
1069  }
1070  ObjectGroup* group = new ObjectGroup(length);
1071  for (size_t i = 0; i < length; ++i)
1072    group->objects[i] = handles[i];
1073  group->info = info;
1074  object_groups_.Add(group);
1075}
1076
1077
1078void GlobalHandles::SetObjectGroupId(Object** handle,
1079                                     UniqueId id) {
1080  object_group_connections_.Add(ObjectGroupConnection(id, handle));
1081}
1082
1083
// Attaches heap-profiler retainer info to the object group with the given
// |id|; consumed later by ComputeObjectGroupsAndImplicitReferences().
void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
                                          RetainedObjectInfo* info) {
  retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
}
1088
1089
// Records that the object group |id| implicitly keeps |child| alive.
void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
  // Independent handles may not be implicit-reference children.
  DCHECK(!Node::FromLocation(child)->is_independent());
  implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
}
1094
1095
// Records that |parent| implicitly keeps |child| alive, modeled as a
// one-element implicit reference group.
void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
  // Independent handles may not be implicit-reference children.
  DCHECK(!Node::FromLocation(child)->is_independent());
  ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
  group->children[0] = child;
  implicit_ref_groups_.Add(group);
}
1102
1103
1104void GlobalHandles::RemoveObjectGroups() {
1105  for (int i = 0; i < object_groups_.length(); i++)
1106    delete object_groups_.at(i);
1107  object_groups_.Clear();
1108  for (int i = 0; i < retainer_infos_.length(); ++i)
1109    retainer_infos_[i].info->Dispose();
1110  retainer_infos_.Clear();
1111  object_group_connections_.Clear();
1112  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1113}
1114
1115
1116void GlobalHandles::RemoveImplicitRefGroups() {
1117  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
1118    delete implicit_ref_groups_.at(i);
1119  }
1120  implicit_ref_groups_.Clear();
1121  implicit_ref_connections_.Clear();
1122}
1123
1124
// Isolate teardown hook.  Currently a no-op: weak callbacks registered on
// still-live handles are not run at shutdown.
void GlobalHandles::TearDown() {
  // TODO(1428): invoke weak callbacks.
}
1128
1129
// Converts the flat connection lists (built via SetObjectGroupId /
// SetReferenceFromGroup / SetRetainedObjectInfo) into ObjectGroup and
// ImplicitRefGroup records for the GC.  All three lists are sorted by
// group id, then scanned in one pass, pairing each run of equal ids in
// object_group_connections_ with the matching implicit refs and retainer
// info.  Consumes and clears all three input lists.
void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
  if (object_group_connections_.length() == 0) {
    // No groups were registered this cycle: dispose any orphaned retainer
    // infos and drop stray implicit refs.
    for (int i = 0; i < retainer_infos_.length(); ++i)
      retainer_infos_[i].info->Dispose();
    retainer_infos_.Clear();
    implicit_ref_connections_.Clear();
    return;
  }

  // Sorting by id lets groups be detected as runs of equal ids and lets
  // the three lists be merged with simple forward-moving cursors.
  object_group_connections_.Sort();
  retainer_infos_.Sort();
  implicit_ref_connections_.Sort();

  int info_index = 0;  // For iterating retainer_infos_.
  UniqueId current_group_id(0);
  int current_group_start = 0;

  int current_implicit_refs_start = 0;
  int current_implicit_refs_end = 0;
  // Note: iterates one past the end so the final run is flushed too.
  for (int i = 0; i <= object_group_connections_.length(); ++i) {
    if (i == 0)
      current_group_id = object_group_connections_[i].id;
    if (i == object_group_connections_.length() ||
        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i[.

      // Find out which implicit references are related to this group. (We want
      // to ignore object groups which only have 1 object, but that object is
      // needed as a representative object for the implicit reference group.)
      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_start].id <
                 current_group_id)
        ++current_implicit_refs_start;
      current_implicit_refs_end = current_implicit_refs_start;
      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_end].id ==
                 current_group_id)
        ++current_implicit_refs_end;

      if (current_implicit_refs_end > current_implicit_refs_start) {
        // Find a representative object for the implicit references.
        HeapObject** representative = NULL;
        for (int j = current_group_start; j < i; ++j) {
          Object** object = object_group_connections_[j].object;
          if ((*object)->IsHeapObject()) {
            representative = reinterpret_cast<HeapObject**>(object);
            break;
          }
        }
        if (representative) {
          // Build one ImplicitRefGroup from the representative to all
          // children registered under this group id.
          ImplicitRefGroup* group = new ImplicitRefGroup(
              representative,
              current_implicit_refs_end - current_implicit_refs_start);
          for (int j = current_implicit_refs_start;
               j < current_implicit_refs_end;
               ++j) {
            group->children[j - current_implicit_refs_start] =
                implicit_ref_connections_[j].object;
          }
          implicit_ref_groups_.Add(group);
        }
        current_implicit_refs_start = current_implicit_refs_end;
      }

      // Find a RetainedObjectInfo for the group.
      RetainedObjectInfo* info = NULL;
      while (info_index < retainer_infos_.length() &&
             retainer_infos_[info_index].id < current_group_id) {
        // Infos for ids with no registered group are never used: dispose.
        retainer_infos_[info_index].info->Dispose();
        ++info_index;
      }
      if (info_index < retainer_infos_.length() &&
          retainer_infos_[info_index].id == current_group_id) {
        // This object group has an associated ObjectGroupRetainerInfo.
        info = retainer_infos_[info_index].info;
        ++info_index;
      }

      // Ignore groups which only contain one object.
      if (i > current_group_start + 1) {
        ObjectGroup* group = new ObjectGroup(i - current_group_start);
        for (int j = current_group_start; j < i; ++j) {
          group->objects[j - current_group_start] =
              object_group_connections_[j].object;
        }
        group->info = info;
        object_groups_.Add(group);
      } else if (info) {
        // Single-object group: no ObjectGroup is created, so the info
        // would otherwise leak.
        info->Dispose();
      }

      if (i < object_group_connections_.length()) {
        current_group_id = object_group_connections_[i].id;
        current_group_start = i;
      }
    }
  }
  // All inputs have been consumed; reset for the next cycle.
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
  retainer_infos_.Clear();
  implicit_ref_connections_.Clear();
}
1232
1233
1234EternalHandles::EternalHandles() : size_(0) {
1235  for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
1236    singleton_handles_[i] = kInvalidIndex;
1237  }
1238}
1239
1240
1241EternalHandles::~EternalHandles() {
1242  for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
1243}
1244
1245
1246void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
1247  int limit = size_;
1248  for (int i = 0; i < blocks_.length(); i++) {
1249    DCHECK(limit > 0);
1250    Object** block = blocks_[i];
1251    visitor->VisitPointers(block, block + Min(limit, kSize));
1252    limit -= kSize;
1253  }
1254}
1255
1256
1257void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
1258  for (int i = 0; i < new_space_indices_.length(); i++) {
1259    visitor->VisitPointer(GetLocation(new_space_indices_[i]));
1260  }
1261}
1262
1263
1264void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
1265  int last = 0;
1266  for (int i = 0; i < new_space_indices_.length(); i++) {
1267    int index = new_space_indices_[i];
1268    if (heap->InNewSpace(*GetLocation(index))) {
1269      new_space_indices_[last++] = index;
1270    }
1271  }
1272  new_space_indices_.Rewind(last);
1273}
1274
1275
// Allocates a new eternal handle for |object| and writes its slot index
// into |*index|.  A NULL object leaves |*index| as kInvalidIndex.  Storage
// grows in fixed-size blocks; unused slots hold the hole value so a slot
// being claimed twice is caught by the DCHECK below.
void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
  DCHECK_EQ(kInvalidIndex, *index);
  if (object == NULL) return;
  // The hole marks free slots, so it cannot itself be stored.
  DCHECK_NE(isolate->heap()->the_hole_value(), object);
  // Split the running size into block number and offset within the block.
  int block = size_ >> kShift;
  int offset = size_ & kMask;
  // need to resize
  if (offset == 0) {
    Object** next_block = new Object*[kSize];
    Object* the_hole = isolate->heap()->the_hole_value();
    // Pre-fill with the hole so every slot starts in the "free" state.
    MemsetPointer(next_block, the_hole, kSize);
    blocks_.Add(next_block);
  }
  DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
  blocks_[block][offset] = object;
  // Track new-space residents so scavenges can visit/update them.
  if (isolate->heap()->InNewSpace(object)) {
    new_space_indices_.Add(size_);
  }
  *index = size_++;
}
1296
1297
1298} }  // namespace v8::internal
1299