// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/cpu-profiler.h"
#include "src/heap/heap.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/msan.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

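// Appends a promoted object to the promotion queue. Each entry occupies two
// words (target and size) and the queue grows downwards from rear_; once the
// next entry would cross limit_, the queue head is relocated and subsequent
// entries go onto the emergency stack instead.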
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    DCHECK(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
  }

  if ((rear_ - 2) < limit_) {
    RelocateQueueHead();
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


template <>
inline bool Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
inline bool Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}


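// Note: the Copy* helpers below return the source array itself when its
// length is zero; a copy of an empty array would be indistinguishable from
// the original, so no new object is allocated.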
AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


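// Central entry point for raw allocation. Dispatches on the requested space;
// a failed new-space allocation falls back to retry_space when an
// AlwaysAllocateScope is active, and oversized code objects are redirected
// to the large object space.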
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationSpace retry_space) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRaw(size_in_bytes);
    } else {
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}


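// Invoked on every successful allocation: forwards the event to the heap
// profiler and, under --verify-predictable, folds the allocation into a
// running hash that can be compared across runs to check determinism.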
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


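// Invoked when the GC moves an object: notifies the heap profiler, logs
// moved SharedFunctionInfos for the CPU profiler, and updates the
// predictable-mode allocations hash.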
void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }

  if (isolate_->logger()->is_logging_code_events() ||
      isolate_->cpu_profiler()->is_profiling()) {
    if (target->IsSharedFunctionInfo()) {
      PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                    target->address()));
    }
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


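// Encodes an object's identity as a 32-bit value combining its offset within
// the owning page with the id of the space that owns the page, then feeds
// that value into the running hash.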
void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}


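// Disposes of the external resource backing an external string, if it has
// not been disposed already, and clears the resource slot. The slot address
// is computed by hand from the object's raw (untagged) address.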
void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


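// An object is promoted only if it has already survived one scavenge, that
// is, if it lies below the age mark of its new-space page.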
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}


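// Records writes into old-space objects in the store buffer so a later
// scavenge can find old-to-new pointers without scanning the old generation.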
void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  DCHECK(type != MAP_TYPE);
  DCHECK(type != CODE_TYPE);
  DCHECK(type != ODDBALL_TYPE);
  DCHECK(type != CELL_TYPE);
  DCHECK(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
               ? OLD_POINTER_SPACE
               : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate; they are skipped by
  //    incremental marking explicitly to prevent invalid patterns.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
                            obj->IsExternalString());
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
    case INVALID_SPACE:
      break;
  }
  UNREACHABLE();
  return false;
}


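// Word-granularity block copy and move. MoveBlock copies forward word by
// word when that is safe (dst below src, or the ranges are disjoint) and
// falls back to MemMove for destructively overlapping ranges.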
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  Map* candidate_map = candidate->map();
  // This fast check may peek at an uninitialized word. However, the slow check
  // below (memento_address == top) ensures that this is safe. Mark the word as
  // initialized to silence MemorySanitizer warnings.
  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
  if (candidate_map != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so it
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always performing
  // the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


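// Pretenuring feedback: if an allocation memento follows the object, bump
// the found count on its AllocationSite; the site may then be added to the
// scratchpad for later pretenuring decisions.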
void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


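// Selects the appropriate collector (scavenger or mark-compact) for the
// given space and runs it.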
bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


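// Computes the owning Isolate from a Heap* without storing a back pointer:
// the offset of the heap inside Isolate is derived by pretending an Isolate
// lives at address 4 and asking for its heap().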
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


// Calls the FUNCTION_CALL function and retries it up to two more times:
// once after a targeted garbage collection and once after a last-resort
// collection under an AlwaysAllocateScope. This guarantees that allocations
// performed during the call succeed if there is enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) {                   \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE;                                         \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    AllocationResult __allocation__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                                \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),            \
                                      "allocation failure");                  \
    __allocation__ = FUNCTION_CALL;                                           \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__(ISOLATE);                                 \
      __allocation__ = FUNCTION_CALL;                                         \
    }                                                                         \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* TODO(1181417): Fix this. */                                            \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY)                         \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


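// External strings are kept in two lists, split by whether they currently
// live in new or old space, which lets the scavenger confine its updates to
// the new-space list.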
void ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


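// Tracks the nesting depth of GC callbacks; CheckReenter() is true only for
// the outermost scope, which prevents reentrant callback invocation.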
GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


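// Debug visitors: VerifyPointersVisitor checks that every heap pointer in
// [start, end) points into the heap and references an object with a valid
// map; VerifySmisVisitor checks that every slot holds a Smi.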
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}
}  // namespace v8::internal

#endif  // V8_HEAP_HEAP_INL_H_