heap.h revision 1e0659c275bb392c045087af4f6b0d7565cb3d77
1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
33#include "spaces.h"
34#include "splay-tree-inl.h"
35#include "v8-counters.h"
36
37namespace v8 {
38namespace internal {
39
40
// Defines all the unconditional strong roots in Heap.
//
// This is an X-macro list: each entry is V(type, camel_name, CamelName).
// Instantiating the list with a suitable definition of V generates one
// piece of code per root (e.g. accessors or enum values).
//
// Fix: the final entry used to end with a line-continuation backslash,
// which made the macro silently include the following (blank) line.  Any
// future edit to that line would have been spliced into the macro -- or,
// with the blank line removed, into the #if below -- so the continuation
// has been dropped from the last entry.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V)                                      \
  /* Put the byte array map early.  We need it to be in place by the time   */ \
  /* the deserializer hits the next page, since it wants to put a byte      */ \
  /* array in the unused space at the end of the page.                      */ \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Object, undefined_value, UndefinedValue)                                   \
  V(Object, the_hole_value, TheHoleValue)                                      \
  V(Object, null_value, NullValue)                                             \
  V(Object, true_value, TrueValue)                                             \
  V(Object, false_value, FalseValue)                                           \
  V(Object, arguments_marker, ArgumentsMarker)                                 \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, meta_map, MetaMap)                                                    \
  V(Object, termination_exception, TerminationException)                       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap)  \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap)  \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, pixel_array_map, PixelArrayMap)                                       \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, context_map, ContextMap)                                              \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, proxy_map, ProxyMap)                                                  \
  V(Object, nan_value, NanValue)                                               \
  V(Object, minus_zero_value, MinusZeroValue)                                  \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Proxy, prototype_accessors, PrototypeAccessors)                            \
  V(NumberDictionary, code_stubs, CodeStubs)                                   \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(Code, c_entry_code, CEntryCode)                                            \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames)

// STRONG_ROOT_LIST is the unconditional list, extended with the entry stubs
// that exist only in particular build configurations.
#if V8_TARGET_ARCH_ARM
#if !V8_INTERPRETED_REGEXP
// ARM with native (compiled) regexp: both extra entry stubs are roots.
#define STRONG_ROOT_LIST(V)                                                    \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
  V(Code, re_c_entry_code, RegExpCEntryCode)                                   \
  V(Code, direct_c_entry_code, DirectCEntryCode)
#else
// ARM with interpreted regexp: only the direct C entry stub is a root.
#define STRONG_ROOT_LIST(V)                                                    \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
  V(Code, direct_c_entry_code, DirectCEntryCode)
#endif
#else
// Non-ARM targets add no extra roots.
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
134
// The complete root list: every strong root plus the symbol table.  The
// symbol table is deliberately kept out of STRONG_ROOT_LIST.
// NOTE(review): presumably so the GC can treat its entries weakly --
// confirm against the visitors that iterate STRONG_ROOT_LIST.
#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(SymbolTable, symbol_table, SymbolTable)
138
// X-macro list of (root_accessor_name, string_literal) pairs.  Expanding
// the list with a suitable definition of V generates code once per entry.
// NOTE(review): these appear to be the strings the heap pre-creates as
// symbols (interned strings) at setup time -- confirm in heap.cc.
#define SYMBOL_LIST(V)                                                   \
  V(Array_symbol, "Array")                                               \
  V(Object_symbol, "Object")                                             \
  V(Proto_symbol, "__proto__")                                           \
  V(StringImpl_symbol, "StringImpl")                                     \
  V(arguments_symbol, "arguments")                                       \
  V(Arguments_symbol, "Arguments")                                       \
  V(arguments_shadow_symbol, ".arguments")                               \
  V(call_symbol, "call")                                                 \
  V(apply_symbol, "apply")                                               \
  V(caller_symbol, "caller")                                             \
  V(boolean_symbol, "boolean")                                           \
  V(Boolean_symbol, "Boolean")                                           \
  V(callee_symbol, "callee")                                             \
  V(constructor_symbol, "constructor")                                   \
  V(code_symbol, ".code")                                                \
  V(result_symbol, ".result")                                            \
  V(catch_var_symbol, ".catch-var")                                      \
  V(empty_symbol, "")                                                    \
  V(eval_symbol, "eval")                                                 \
  V(function_symbol, "function")                                         \
  V(length_symbol, "length")                                             \
  V(name_symbol, "name")                                                 \
  V(number_symbol, "number")                                             \
  V(Number_symbol, "Number")                                             \
  V(RegExp_symbol, "RegExp")                                             \
  V(source_symbol, "source")                                             \
  V(global_symbol, "global")                                             \
  V(ignore_case_symbol, "ignoreCase")                                    \
  V(multiline_symbol, "multiline")                                       \
  V(input_symbol, "input")                                               \
  V(index_symbol, "index")                                               \
  V(last_index_symbol, "lastIndex")                                      \
  V(object_symbol, "object")                                             \
  V(prototype_symbol, "prototype")                                       \
  V(string_symbol, "string")                                             \
  V(String_symbol, "String")                                             \
  V(Date_symbol, "Date")                                                 \
  V(this_symbol, "this")                                                 \
  V(to_string_symbol, "toString")                                       \
  V(char_at_symbol, "CharAt")                                            \
  V(undefined_symbol, "undefined")                                       \
  V(value_of_symbol, "valueOf")                                          \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")               \
  V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized")                 \
  V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized")               \
  V(KeyedLoadPixelArray_symbol, "KeyedLoadPixelArray")                   \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                  \
  V(illegal_access_symbol, "illegal access")                             \
  V(out_of_memory_symbol, "out-of-memory")                               \
  V(illegal_execution_state_symbol, "illegal execution state")           \
  V(get_symbol, "get")                                                   \
  V(set_symbol, "set")                                                   \
  V(function_class_symbol, "Function")                                   \
  V(illegal_argument_symbol, "illegal argument")                         \
  V(MakeReferenceError_symbol, "MakeReferenceError")                     \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                           \
  V(MakeTypeError_symbol, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_symbol, "illegal_return")                             \
  V(illegal_break_symbol, "illegal_break")                               \
  V(illegal_continue_symbol, "illegal_continue")                         \
  V(unknown_label_symbol, "unknown_label")                               \
  V(redeclaration_symbol, "redeclaration")                               \
  V(failure_symbol, "<failure>")                                         \
  V(space_symbol, " ")                                                   \
  V(exec_symbol, "exec")                                                 \
  V(zero_symbol, "0")                                                    \
  V(global_eval_symbol, "GlobalEval")                                    \
  V(identity_hash_symbol, "v8::IdentityHash")                            \
  V(closure_symbol, "(closure)")                                         \
  /* NOTE(review): inconsistent name -- lacks the _symbol suffix, so the */ \
  /* generated accessor is use_strict(); renaming would break callers.  */ \
  V(use_strict, "use strict")                                            \
  V(KeyedLoadExternalArray_symbol, "KeyedLoadExternalArray")             \
  V(KeyedStoreExternalArray_symbol, "KeyedStoreExternalArray")
217
218
// Forward declarations.
class GCTracer;
class HeapStats;
class WeakObjectRetainer;


// Given the address of a pointer slot, returns the String that slot should
// now refer to.  NOTE(review): presumably used to update external-string
// entries after objects move during GC -- confirm against the callers.
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);

// Callback over a [start, end) address range that visits object slots via
// copy_object_func.  NOTE(review): the bool result appears to report
// something about remaining dirty slots in the region -- confirm against
// the DirtyRegionCallback users before relying on it.
typedef bool (*DirtyRegionCallback)(Address start,
                                    Address end,
                                    ObjectSlotCallback copy_object_func);
231
// The all-static Heap class captures the interface to the global object heap.
// All JavaScript contexts in this process share the same object heap.
234
235class Heap : public AllStatic {
236 public:
237  // Configure heap size before setup. Return false if the heap has been
238  // setup already.
239  static bool ConfigureHeap(int max_semispace_size,
240                            int max_old_gen_size,
241                            int max_executable_size);
242  static bool ConfigureHeapDefault();
243
244  // Initializes the global object heap. If create_heap_objects is true,
245  // also creates the basic non-mutable objects.
246  // Returns whether it succeeded.
247  static bool Setup(bool create_heap_objects);
248
249  // Destroys all memory allocated by the heap.
250  static void TearDown();
251
252  // Set the stack limit in the roots_ array.  Some architectures generate
253  // code that looks here, because it is faster than loading from the static
254  // jslimit_/real_jslimit_ variable in the StackGuard.
255  static void SetStackLimits();
256
257  // Returns whether Setup has been called.
258  static bool HasBeenSetup();
259
260  // Returns the maximum amount of memory reserved for the heap.  For
261  // the young generation, we reserve 4 times the amount needed for a
262  // semi space.  The young generation consists of two semi spaces and
263  // we reserve twice the amount needed for those in order to ensure
264  // that new space can be aligned to its size.
265  static intptr_t MaxReserved() {
266    return 4 * reserved_semispace_size_ + max_old_generation_size_;
267  }
268  static int MaxSemiSpaceSize() { return max_semispace_size_; }
269  static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
270  static int InitialSemiSpaceSize() { return initial_semispace_size_; }
271  static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
272  static intptr_t MaxExecutableSize() { return max_executable_size_; }
273
274  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
275  // more spaces are needed until it reaches the limit.
276  static intptr_t Capacity();
277
278  // Returns the amount of memory currently committed for the heap.
279  static intptr_t CommittedMemory();
280
281  // Returns the amount of executable memory currently committed for the heap.
282  static intptr_t CommittedMemoryExecutable();
283
284  // Returns the available bytes in space w/o growing.
285  // Heap doesn't guarantee that it can allocate an object that requires
286  // all available bytes. Check MaxHeapObjectSize() instead.
287  static intptr_t Available();
288
289  // Returns the maximum object size in paged space.
290  static inline int MaxObjectSizeInPagedSpace();
291
292  // Returns of size of all objects residing in the heap.
293  static intptr_t SizeOfObjects();
294
295  // Return the starting address and a mask for the new space.  And-masking an
296  // address with the mask will result in the start address of the new space
297  // for all addresses in either semispace.
298  static Address NewSpaceStart() { return new_space_.start(); }
299  static uintptr_t NewSpaceMask() { return new_space_.mask(); }
300  static Address NewSpaceTop() { return new_space_.top(); }
301
302  static NewSpace* new_space() { return &new_space_; }
303  static OldSpace* old_pointer_space() { return old_pointer_space_; }
304  static OldSpace* old_data_space() { return old_data_space_; }
305  static OldSpace* code_space() { return code_space_; }
306  static MapSpace* map_space() { return map_space_; }
307  static CellSpace* cell_space() { return cell_space_; }
308  static LargeObjectSpace* lo_space() { return lo_space_; }
309
310  static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
311  static Address always_allocate_scope_depth_address() {
312    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
313  }
314  static bool linear_allocation() {
315    return linear_allocation_scope_depth_ != 0;
316  }
317
318  static Address* NewSpaceAllocationTopAddress() {
319    return new_space_.allocation_top_address();
320  }
321  static Address* NewSpaceAllocationLimitAddress() {
322    return new_space_.allocation_limit_address();
323  }
324
325  // Uncommit unused semi space.
326  static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
327
328#ifdef ENABLE_HEAP_PROTECTION
329  // Protect/unprotect the heap by marking all spaces read-only/writable.
330  static void Protect();
331  static void Unprotect();
332#endif
333
334  // Allocates and initializes a new JavaScript object based on a
335  // constructor.
336  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
337  // failed.
338  // Please note this does not perform a garbage collection.
339  MUST_USE_RESULT static MaybeObject* AllocateJSObject(
340      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
341
342  // Allocates and initializes a new global object based on a constructor.
343  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
344  // failed.
345  // Please note this does not perform a garbage collection.
346  MUST_USE_RESULT static MaybeObject* AllocateGlobalObject(
347      JSFunction* constructor);
348
349  // Returns a deep copy of the JavaScript object.
350  // Properties and elements are copied too.
351  // Returns failure if allocation failed.
352  MUST_USE_RESULT static MaybeObject* CopyJSObject(JSObject* source);
353
354  // Allocates the function prototype.
355  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
356  // failed.
357  // Please note this does not perform a garbage collection.
358  MUST_USE_RESULT static MaybeObject* AllocateFunctionPrototype(
359      JSFunction* function);
360
361  // Reinitialize an JSGlobalProxy based on a constructor.  The object
362  // must have the same size as objects allocated using the
363  // constructor.  The object is reinitialized and behaves as an
364  // object that has been freshly allocated using the constructor.
365  MUST_USE_RESULT static MaybeObject* ReinitializeJSGlobalProxy(
366      JSFunction* constructor,
367      JSGlobalProxy* global);
368
369  // Allocates and initializes a new JavaScript object based on a map.
370  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
371  // failed.
372  // Please note this does not perform a garbage collection.
373  MUST_USE_RESULT static MaybeObject* AllocateJSObjectFromMap(
374      Map* map, PretenureFlag pretenure = NOT_TENURED);
375
376  // Allocates a heap object based on the map.
377  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
378  // failed.
379  // Please note this function does not perform a garbage collection.
380  MUST_USE_RESULT static MaybeObject* Allocate(Map* map, AllocationSpace space);
381
382  // Allocates a JS Map in the heap.
383  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
384  // failed.
385  // Please note this function does not perform a garbage collection.
386  MUST_USE_RESULT static MaybeObject* AllocateMap(InstanceType instance_type,
387                                             int instance_size);
388
389  // Allocates a partial map for bootstrapping.
390  MUST_USE_RESULT static MaybeObject* AllocatePartialMap(
391      InstanceType instance_type,
392      int instance_size);
393
394  // Allocate a map for the specified function
395  MUST_USE_RESULT static MaybeObject* AllocateInitialMap(JSFunction* fun);
396
397  // Allocates an empty code cache.
398  MUST_USE_RESULT static MaybeObject* AllocateCodeCache();
399
400  // Clear the Instanceof cache (used when a prototype changes).
401  static void ClearInstanceofCache() {
402    set_instanceof_cache_function(the_hole_value());
403  }
404
405  // Allocates and fully initializes a String.  There are two String
406  // encodings: ASCII and two byte. One should choose between the three string
407  // allocation functions based on the encoding of the string buffer used to
408  // initialized the string.
409  //   - ...FromAscii initializes the string from a buffer that is ASCII
410  //     encoded (it does not check that the buffer is ASCII encoded) and the
411  //     result will be ASCII encoded.
412  //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
413  //     encoded.  If the characters are all single-byte characters, the
414  //     result will be ASCII encoded, otherwise it will converted to two
415  //     byte.
416  //   - ...FromTwoByte initializes the string from a buffer that is two-byte
417  //     encoded.  If the characters are all single-byte characters, the
418  //     result will be converted to ASCII, otherwise it will be left as
419  //     two-byte.
420  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
421  // failed.
422  // Please note this does not perform a garbage collection.
423  MUST_USE_RESULT static MaybeObject* AllocateStringFromAscii(
424      Vector<const char> str,
425      PretenureFlag pretenure = NOT_TENURED);
426  MUST_USE_RESULT static inline MaybeObject* AllocateStringFromUtf8(
427      Vector<const char> str,
428      PretenureFlag pretenure = NOT_TENURED);
429  MUST_USE_RESULT static MaybeObject* AllocateStringFromUtf8Slow(
430      Vector<const char> str,
431      PretenureFlag pretenure = NOT_TENURED);
432  MUST_USE_RESULT static MaybeObject* AllocateStringFromTwoByte(
433      Vector<const uc16> str,
434      PretenureFlag pretenure = NOT_TENURED);
435
436  // Allocates a symbol in old space based on the character stream.
437  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
438  // failed.
439  // Please note this function does not perform a garbage collection.
440  MUST_USE_RESULT static inline MaybeObject* AllocateSymbol(
441      Vector<const char> str,
442      int chars,
443      uint32_t hash_field);
444
445  MUST_USE_RESULT static inline MaybeObject* AllocateAsciiSymbol(
446        Vector<const char> str,
447        uint32_t hash_field);
448
449  MUST_USE_RESULT static inline MaybeObject* AllocateTwoByteSymbol(
450        Vector<const uc16> str,
451        uint32_t hash_field);
452
453  MUST_USE_RESULT static MaybeObject* AllocateInternalSymbol(
454      unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
455
456  MUST_USE_RESULT static MaybeObject* AllocateExternalSymbol(
457      Vector<const char> str,
458      int chars);
459
460
461  // Allocates and partially initializes a String.  There are two String
462  // encodings: ASCII and two byte.  These functions allocate a string of the
463  // given length and set its map and length fields.  The characters of the
464  // string are uninitialized.
465  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
466  // failed.
467  // Please note this does not perform a garbage collection.
468  MUST_USE_RESULT static MaybeObject* AllocateRawAsciiString(
469      int length,
470      PretenureFlag pretenure = NOT_TENURED);
471  MUST_USE_RESULT static MaybeObject* AllocateRawTwoByteString(
472      int length,
473      PretenureFlag pretenure = NOT_TENURED);
474
475  // Computes a single character string where the character has code.
476  // A cache is used for ascii codes.
477  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
478  // failed. Please note this does not perform a garbage collection.
479  MUST_USE_RESULT static MaybeObject* LookupSingleCharacterStringFromCode(
480      uint16_t code);
481
482  // Allocate a byte array of the specified length
483  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
484  // failed.
485  // Please note this does not perform a garbage collection.
486  MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length,
487                                                   PretenureFlag pretenure);
488
489  // Allocate a non-tenured byte array of the specified length
490  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
491  // failed.
492  // Please note this does not perform a garbage collection.
493  MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length);
494
495  // Allocate a pixel array of the specified length
496  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
497  // failed.
498  // Please note this does not perform a garbage collection.
499  MUST_USE_RESULT static MaybeObject* AllocatePixelArray(int length,
500                                                    uint8_t* external_pointer,
501                                                    PretenureFlag pretenure);
502
503  // Allocates an external array of the specified length and type.
504  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
505  // failed.
506  // Please note this does not perform a garbage collection.
507  MUST_USE_RESULT static MaybeObject* AllocateExternalArray(
508      int length,
509      ExternalArrayType array_type,
510      void* external_pointer,
511      PretenureFlag pretenure);
512
513  // Allocate a tenured JS global property cell.
514  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
515  // failed.
516  // Please note this does not perform a garbage collection.
517  MUST_USE_RESULT static MaybeObject* AllocateJSGlobalPropertyCell(
518      Object* value);
519
520  // Allocates a fixed array initialized with undefined values
521  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
522  // failed.
523  // Please note this does not perform a garbage collection.
524  MUST_USE_RESULT static MaybeObject* AllocateFixedArray(
525      int length,
526      PretenureFlag pretenure);
527  // Allocates a fixed array initialized with undefined values
528  MUST_USE_RESULT static MaybeObject* AllocateFixedArray(int length);
529
530  // Allocates an uninitialized fixed array. It must be filled by the caller.
531  //
532  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
533  // failed.
534  // Please note this does not perform a garbage collection.
535  MUST_USE_RESULT static MaybeObject* AllocateUninitializedFixedArray(
536      int length);
537
538  // Make a copy of src and return it. Returns
539  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
540  MUST_USE_RESULT static inline MaybeObject* CopyFixedArray(FixedArray* src);
541
542  // Make a copy of src, set the map, and return the copy. Returns
543  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
544  MUST_USE_RESULT static MaybeObject* CopyFixedArrayWithMap(FixedArray* src,
545                                                            Map* map);
546
547  // Allocates a fixed array initialized with the hole values.
548  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
549  // failed.
550  // Please note this does not perform a garbage collection.
551  MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithHoles(
552      int length,
553      PretenureFlag pretenure = NOT_TENURED);
554
555  // AllocateHashTable is identical to AllocateFixedArray except
556  // that the resulting object has hash_table_map as map.
557  MUST_USE_RESULT static MaybeObject* AllocateHashTable(
558      int length, PretenureFlag pretenure = NOT_TENURED);
559
560  // Allocate a global (but otherwise uninitialized) context.
561  MUST_USE_RESULT static MaybeObject* AllocateGlobalContext();
562
563  // Allocate a function context.
564  MUST_USE_RESULT static MaybeObject* AllocateFunctionContext(
565      int length,
566      JSFunction* closure);
567
568  // Allocate a 'with' context.
569  MUST_USE_RESULT static MaybeObject* AllocateWithContext(
570      Context* previous,
571      JSObject* extension,
572      bool is_catch_context);
573
574  // Allocates a new utility object in the old generation.
575  MUST_USE_RESULT static MaybeObject* AllocateStruct(InstanceType type);
576
577  // Allocates a function initialized with a shared part.
578  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
579  // failed.
580  // Please note this does not perform a garbage collection.
581  MUST_USE_RESULT static MaybeObject* AllocateFunction(
582      Map* function_map,
583      SharedFunctionInfo* shared,
584      Object* prototype,
585      PretenureFlag pretenure = TENURED);
586
  // Indices for direct access into argument objects.
  // Size of an arguments object: the regular JSObject header plus two
  // in-object slots (callee and length).
  static const int kArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // In-object slot index of the 'callee' property.
  static const int arguments_callee_index = 0;
  // In-object slot index of the 'length' property.
  static const int arguments_length_index = 1;
592
593  // Allocates an arguments object - optionally with an elements array.
594  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
595  // failed.
596  // Please note this does not perform a garbage collection.
597  MUST_USE_RESULT static MaybeObject* AllocateArgumentsObject(Object* callee,
598                                                              int length);
599
600  // Same as NewNumberFromDouble, but may return a preallocated/immutable
601  // number object (e.g., minus_zero_value_, nan_value_)
602  MUST_USE_RESULT static MaybeObject* NumberFromDouble(
603      double value, PretenureFlag pretenure = NOT_TENURED);
604
  // Allocates a HeapNumber from value.
  MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(
      double value,
      PretenureFlag pretenure);
  // Convenience overload, equivalent to pretenure = NOT_TENURED.
  MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(double value);
611
612  // Converts an int into either a Smi or a HeapNumber object.
613  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
614  // failed.
615  // Please note this does not perform a garbage collection.
616  MUST_USE_RESULT static inline MaybeObject* NumberFromInt32(int32_t value);
617
618  // Converts an int into either a Smi or a HeapNumber object.
619  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
620  // failed.
621  // Please note this does not perform a garbage collection.
622  MUST_USE_RESULT static inline MaybeObject* NumberFromUint32(uint32_t value);
623
624  // Allocates a new proxy object.
625  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
626  // failed.
627  // Please note this does not perform a garbage collection.
628  MUST_USE_RESULT static MaybeObject* AllocateProxy(
629      Address proxy,
630      PretenureFlag pretenure = NOT_TENURED);
631
632  // Allocates a new SharedFunctionInfo object.
633  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
634  // failed.
635  // Please note this does not perform a garbage collection.
636  MUST_USE_RESULT static MaybeObject* AllocateSharedFunctionInfo(Object* name);
637
638  // Allocates a new JSMessageObject object.
639  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
640  // failed.
641  // Please note that this does not perform a garbage collection.
642  MUST_USE_RESULT static MaybeObject* AllocateJSMessageObject(
643      String* type,
644      JSArray* arguments,
645      int start_position,
646      int end_position,
647      Object* script,
648      Object* stack_trace,
649      Object* stack_frames);
650
651  // Allocates a new cons string object.
652  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
653  // failed.
654  // Please note this does not perform a garbage collection.
655  MUST_USE_RESULT static MaybeObject* AllocateConsString(String* first,
656                                                         String* second);
657
658  // Allocates a new sub string object which is a substring of an underlying
659  // string buffer stretching from the index start (inclusive) to the index
660  // end (exclusive).
661  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
662  // failed.
663  // Please note this does not perform a garbage collection.
664  MUST_USE_RESULT static MaybeObject* AllocateSubString(
665      String* buffer,
666      int start,
667      int end,
668      PretenureFlag pretenure = NOT_TENURED);
669
670  // Allocate a new external string object, which is backed by a string
671  // resource that resides outside the V8 heap.
672  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
673  // failed.
674  // Please note this does not perform a garbage collection.
675  MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromAscii(
676      ExternalAsciiString::Resource* resource);
677  MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromTwoByte(
678      ExternalTwoByteString::Resource* resource);
679
680  // Finalizes an external string by deleting the associated external
681  // data and clearing the resource pointer.
682  static inline void FinalizeExternalString(String* string);
683
684  // Allocates an uninitialized object.  The memory is non-executable if the
685  // hardware and OS allow.
686  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
687  // failed.
688  // Please note this function does not perform a garbage collection.
689  MUST_USE_RESULT static inline MaybeObject* AllocateRaw(
690      int size_in_bytes,
691      AllocationSpace space,
692      AllocationSpace retry_space);
693
694  // Initialize a filler object to keep the ability to iterate over the heap
695  // when shortening objects.
696  static void CreateFillerObjectAt(Address addr, int size);
697
698  // Makes a new native code object
699  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
700  // failed. On success, the pointer to the Code object is stored in the
701  // self_reference. This allows generated code to reference its own Code
702  // object by containing this pointer.
703  // Please note this function does not perform a garbage collection.
704  MUST_USE_RESULT static MaybeObject* CreateCode(const CodeDesc& desc,
705                                                 Code::Flags flags,
706                                                 Handle<Object> self_reference);
707
708  MUST_USE_RESULT static MaybeObject* CopyCode(Code* code);
709
710  // Copy the code and scope info part of the code object, but insert
711  // the provided data as the relocation information.
712  MUST_USE_RESULT static MaybeObject* CopyCode(Code* code,
713                                               Vector<byte> reloc_info);
714
715  // Finds the symbol for string in the symbol table.
716  // If not found, a new symbol is added to the table and returned.
717  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
718  // failed.
719  // Please note this function does not perform a garbage collection.
720  MUST_USE_RESULT static MaybeObject* LookupSymbol(Vector<const char> str);
721  MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(Vector<const char> str);
722  MUST_USE_RESULT static MaybeObject* LookupTwoByteSymbol(
723      Vector<const uc16> str);
724  MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(const char* str) {
725    return LookupSymbol(CStrVector(str));
726  }
727  MUST_USE_RESULT static MaybeObject* LookupSymbol(String* str);
728  static bool LookupSymbolIfExists(String* str, String** symbol);
729  static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
730
731  // Compute the matching symbol map for a string if possible.
732  // NULL is returned if string is in new space or not flattened.
733  static Map* SymbolMapForString(String* str);
734
735  // Tries to flatten a string before compare operation.
736  //
737  // Returns a failure in case it was decided that flattening was
738  // necessary and failed.  Note, if flattening is not necessary the
739  // string might stay non-flat even when not a failure is returned.
740  //
741  // Please note this function does not perform a garbage collection.
742  MUST_USE_RESULT static inline MaybeObject* PrepareForCompare(String* str);
743
744  // Converts the given boolean condition to JavaScript boolean value.
745  static Object* ToBoolean(bool condition) {
746    return condition ? true_value() : false_value();
747  }
748
749  // Code that should be run before and after each GC.  Includes some
750  // reporting/verification activities when compiled with DEBUG set.
751  static void GarbageCollectionPrologue();
752  static void GarbageCollectionEpilogue();
753
754  // Performs garbage collection operation.
755  // Returns whether there is a chance that another major GC could
756  // collect more garbage.
757  static bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
758
759  // Performs garbage collection operation.
760  // Returns whether there is a chance that another major GC could
761  // collect more garbage.
762  inline static bool CollectGarbage(AllocationSpace space);
763
764  // Performs a full garbage collection. Force compaction if the
765  // parameter is true.
766  static void CollectAllGarbage(bool force_compaction);
767
768  // Last hope GC, should try to squeeze as much as possible.
769  static void CollectAllAvailableGarbage();
770
771  // Notify the heap that a context has been disposed.
772  static int NotifyContextDisposed() { return ++contexts_disposed_; }
773
774  // Utility to invoke the scavenger. This is needed in test code to
775  // ensure correct callback for weak global handles.
776  static void PerformScavenge();
777
778#ifdef DEBUG
779  // Utility used with flag gc-greedy.
780  static void GarbageCollectionGreedyCheck();
781#endif
782
  // Registers/unregisters a callback invoked before GCs whose type matches
  // gc_type_filter.
  // NOTE(review): the prologue functions take GCEpilogueCallback -- this
  // looks like a copy-paste slip for GCPrologueCallback, and it is harmless
  // only if the two typedefs share the same signature; confirm against v8.h
  // before changing.
  static void AddGCPrologueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  static void RemoveGCPrologueCallback(GCEpilogueCallback callback);

  // Registers/unregisters a callback invoked after GCs whose type matches
  // gc_type_filter.
  static void AddGCEpilogueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
790
  // Installs or clears the single global GC prologue callback.  The ASSERT
  // enforces a set-then-clear protocol: exactly one of the new value and the
  // currently stored value must be NULL.
  static void SetGlobalGCPrologueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;
  }
  // Installs or clears the single global GC epilogue callback (same
  // set-then-clear protocol as the prologue variant above).
  static void SetGlobalGCEpilogueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
  }
799
800  // Heap root getters.  We have versions with and without type::cast() here.
801  // You can't use type::cast during GC because the assert fails.
802#define ROOT_ACCESSOR(type, name, camel_name)                                  \
803  static inline type* name() {                                                 \
804    return type::cast(roots_[k##camel_name##RootIndex]);                       \
805  }                                                                            \
806  static inline type* raw_unchecked_##name() {                                 \
807    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
808  }
809  ROOT_LIST(ROOT_ACCESSOR)
810#undef ROOT_ACCESSOR
811
812// Utility type maps
813#define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
814    static inline Map* name##_map() {                                          \
815      return Map::cast(roots_[k##Name##MapRootIndex]);                         \
816    }
817  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
818#undef STRUCT_MAP_ACCESSOR
819
820#define SYMBOL_ACCESSOR(name, str) static inline String* name() {              \
821    return String::cast(roots_[k##name##RootIndex]);                           \
822  }
823  SYMBOL_LIST(SYMBOL_ACCESSOR)
824#undef SYMBOL_ACCESSOR
825
826  // The hidden_symbol is special because it is the empty string, but does
827  // not match the empty string.
828  static String* hidden_symbol() { return hidden_symbol_; }
829
830  static void set_global_contexts_list(Object* object) {
831    global_contexts_list_ = object;
832  }
833  static Object* global_contexts_list() { return global_contexts_list_; }
834
835  // Iterates over all roots in the heap.
836  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
837  // Iterates over all strong roots in the heap.
838  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
839  // Iterates over all the other roots in the heap.
840  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
841
842  enum ExpectedPageWatermarkState {
843    WATERMARK_SHOULD_BE_VALID,
844    WATERMARK_CAN_BE_INVALID
845  };
846
847  // For each dirty region on a page in use from an old space call
848  // visit_dirty_region callback.
849  // If either visit_dirty_region or callback can cause an allocation
850  // in old space and changes in allocation watermark then
851  // can_preallocate_during_iteration should be set to true.
852  // All pages will be marked as having invalid watermark upon
853  // iteration completion.
854  static void IterateDirtyRegions(
855      PagedSpace* space,
856      DirtyRegionCallback visit_dirty_region,
857      ObjectSlotCallback callback,
858      ExpectedPageWatermarkState expected_page_watermark_state);
859
860  // Interpret marks as a bitvector of dirty marks for regions of size
861  // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
862  // memory interval from start to top. For each dirty region call a
863  // visit_dirty_region callback. Return updated bitvector of dirty marks.
864  static uint32_t IterateDirtyRegions(uint32_t marks,
865                                      Address start,
866                                      Address end,
867                                      DirtyRegionCallback visit_dirty_region,
868                                      ObjectSlotCallback callback);
869
870  // Iterate pointers to from semispace of new space found in memory interval
871  // from start to end.
872  // Update dirty marks for page containing start address.
873  static void IterateAndMarkPointersToFromSpace(Address start,
874                                                Address end,
875                                                ObjectSlotCallback callback);
876
  // Iterate pointers to new space found in memory interval from start to end.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyRegion(Address start,
                                           Address end,
                                           ObjectSlotCallback callback);


  // Iterate pointers to new space found in memory interval from start to end.
  // This interval is considered to belong to the map space.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyMapsRegion(Address start,
                                               Address end,
                                               ObjectSlotCallback callback);
890
891
892  // Returns whether the object resides in new space.
893  static inline bool InNewSpace(Object* object);
894  static inline bool InFromSpace(Object* object);
895  static inline bool InToSpace(Object* object);
896
897  // Checks whether an address/object in the heap (including auxiliary
898  // area and unused area).
899  static bool Contains(Address addr);
900  static bool Contains(HeapObject* value);
901
902  // Checks whether an address/object in a space.
903  // Currently used by tests, serialization and heap verification only.
904  static bool InSpace(Address addr, AllocationSpace space);
905  static bool InSpace(HeapObject* value, AllocationSpace space);
906
907  // Finds out which space an object should get promoted to based on its type.
908  static inline OldSpace* TargetSpace(HeapObject* object);
909  static inline AllocationSpace TargetSpaceId(InstanceType type);
910
911  // Sets the stub_cache_ (only used when expanding the dictionary).
912  static void public_set_code_stubs(NumberDictionary* value) {
913    roots_[kCodeStubsRootIndex] = value;
914  }
915
916  // Support for computing object sizes for old objects during GCs. Returns
917  // a function that is guaranteed to be safe for computing object sizes in
918  // the current GC phase.
919  static HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
920    return gc_safe_size_of_old_object_;
921  }
922
923  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
924  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
925    roots_[kNonMonomorphicCacheRootIndex] = value;
926  }
927
928  static void public_set_empty_script(Script* script) {
929    roots_[kEmptyScriptRootIndex] = script;
930  }
931
932  // Update the next script id.
933  static inline void SetLastScriptId(Object* last_script_id);
934
935  // Generated code can embed this address to get access to the roots.
936  static Object** roots_address() { return roots_; }
937
938  // Get address of global contexts list for serialization support.
939  static Object** global_contexts_list_address() {
940    return &global_contexts_list_;
941  }
942
943#ifdef DEBUG
944  static void Print();
945  static void PrintHandles();
946
947  // Verify the heap is in its normal state before or after a GC.
948  static void Verify();
949
950  // Report heap statistics.
951  static void ReportHeapStatistics(const char* title);
952  static void ReportCodeStatistics(const char* title);
953
954  // Fill in bogus values in from space
955  static void ZapFromSpace();
956#endif
957
958#if defined(ENABLE_LOGGING_AND_PROFILING)
959  // Print short heap statistics.
960  static void PrintShortHeapStatistics();
961#endif
962
963  // Makes a new symbol object
964  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
965  // failed.
966  // Please note this function does not perform a garbage collection.
967  MUST_USE_RESULT static MaybeObject* CreateSymbol(const char* str,
968                                                   int length,
969                                                   int hash);
970  MUST_USE_RESULT static MaybeObject* CreateSymbol(String* str);
971
972  // Write barrier support for address[offset] = o.
973  static inline void RecordWrite(Address address, int offset);
974
975  // Write barrier support for address[start : start + len[ = o.
976  static inline void RecordWrites(Address address, int start, int len);
977
978  // Given an address occupied by a live code object, return that object.
979  static Object* FindCodeObject(Address a);
980
981  // Invoke Shrink on shrinkable spaces.
982  static void Shrink();
983
984  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
985  static inline HeapState gc_state() { return gc_state_; }
986
987#ifdef DEBUG
988  static bool IsAllocationAllowed() { return allocation_allowed_; }
989  static inline bool allow_allocation(bool enable);
990
991  static bool disallow_allocation_failure() {
992    return disallow_allocation_failure_;
993  }
994
995  static void TracePathToObject(Object* target);
996  static void TracePathToGlobal();
997#endif
998
999  // Callback function passed to Heap::Iterate etc.  Copies an object if
1000  // necessary, the object might be promoted to an old space.  The caller must
1001  // ensure the precondition that the object is (a) a heap object and (b) in
1002  // the heap's from space.
1003  static void ScavengePointer(HeapObject** p);
1004  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1005
1006  // Commits from space if it is uncommitted.
1007  static void EnsureFromSpaceIsCommitted();
1008
  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC.  NOTE(review): an older version of this comment claimed
  // a true/false result, but the function returns void -- presumably it
  // collects garbage internally until the reservation succeeds; confirm in
  // heap.cc.  For paged spaces the space requested must include the space
  // wasted at the end of each page when allocating linearly.
  static void ReserveSpace(
    int new_space_size,
    int pointer_space_size,
    int data_space_size,
    int code_space_size,
    int map_space_size,
    int cell_space_size,
    int large_object_size);
1023
1024  //
1025  // Support for the API.
1026  //
1027
1028  static bool CreateApiObjects();
1029
  // Attempt to find the number in a small cache.  If we find it, return
  // the string representation of the number.  Otherwise return undefined.
  static Object* GetNumberStringCache(Object* number);

  // Update the cache with a new number-string pair.
  static void SetNumberStringCache(Object* number, String* str);
1036
1037  // Adjusts the amount of registered external memory.
1038  // Returns the adjusted value.
1039  static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
1040
1041  // Allocate uninitialized fixed array.
1042  MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(int length);
1043  MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(
1044      int length,
1045      PretenureFlag pretenure);
1046
1047  // True if we have reached the allocation limit in the old generation that
1048  // should force the next GC (caused normally) to be a full one.
1049  static bool OldGenerationPromotionLimitReached() {
1050    return (PromotedSpaceSize() + PromotedExternalMemorySize())
1051           > old_gen_promotion_limit_;
1052  }
1053
1054  static intptr_t OldGenerationSpaceAvailable() {
1055    return old_gen_allocation_limit_ -
1056           (PromotedSpaceSize() + PromotedExternalMemorySize());
1057  }
1058
  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now (i.e. the old generation has
  // grown past old_gen_allocation_limit_, so OldGenerationSpaceAvailable
  // has gone negative).
  static bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }
1064
1065  // Can be called when the embedding application is idle.
1066  static bool IdleNotification();
1067
1068  // Declare all the root indices.
1069  enum RootListIndex {
1070#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1071    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1072#undef ROOT_INDEX_DECLARATION
1073
1074// Utility type maps
1075#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1076  STRUCT_LIST(DECLARE_STRUCT_MAP)
1077#undef DECLARE_STRUCT_MAP
1078
1079#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1080    SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
1081#undef SYMBOL_DECLARATION
1082
1083    kSymbolTableRootIndex,
1084    kStrongRootListLength = kSymbolTableRootIndex,
1085    kRootListLength
1086  };
1087
1088  MUST_USE_RESULT static MaybeObject* NumberToString(
1089      Object* number,
1090      bool check_number_string_cache = true);
1091
1092  static Map* MapForExternalArrayType(ExternalArrayType array_type);
1093  static RootListIndex RootIndexForExternalArrayType(
1094      ExternalArrayType array_type);
1095
1096  static void RecordStats(HeapStats* stats, bool take_snapshot = false);
1097
1098  // Copy block of memory from src to dst. Size of block should be aligned
1099  // by pointer size.
1100  static inline void CopyBlock(Address dst, Address src, int byte_size);
1101
1102  static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
1103                                                             Address src,
1104                                                             int byte_size);
1105
1106  // Optimized version of memmove for blocks with pointer size aligned sizes and
1107  // pointer size aligned addresses.
1108  static inline void MoveBlock(Address dst, Address src, int byte_size);
1109
1110  static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
1111                                                             Address src,
1112                                                             int byte_size);
1113
1114  // Check new space expansion criteria and expand semispaces if it was hit.
1115  static void CheckNewSpaceExpansionCriteria();
1116
1117  static inline void IncrementYoungSurvivorsCounter(int survived) {
1118    young_survivors_after_last_gc_ = survived;
1119    survived_since_last_expansion_ += survived;
1120  }
1121
1122  static void UpdateNewSpaceReferencesInExternalStringTable(
1123      ExternalStringTableUpdaterCallback updater_func);
1124
1125  static void ProcessWeakReferences(WeakObjectRetainer* retainer);
1126
1127  // Helper function that governs the promotion policy from new space to
1128  // old.  If the object's old address lies below the new space's age
1129  // mark or if we've already filled the bottom 1/16th of the to space,
1130  // we try to promote this object.
1131  static inline bool ShouldBePromoted(Address old_address, int object_size);
1132
1133  static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1134
1135  static void ClearJSFunctionResultCaches();
1136
1137  static void ClearNormalizedMapCaches();
1138
1139  static GCTracer* tracer() { return tracer_; }
1140
1141 private:
1142  static int reserved_semispace_size_;
1143  static int max_semispace_size_;
1144  static int initial_semispace_size_;
1145  static intptr_t max_old_generation_size_;
1146  static intptr_t max_executable_size_;
1147  static intptr_t code_range_size_;
1148
1149  // For keeping track of how much data has survived
1150  // scavenge since last new space expansion.
1151  static int survived_since_last_expansion_;
1152
1153  static int always_allocate_scope_depth_;
1154  static int linear_allocation_scope_depth_;
1155
1156  // For keeping track of context disposals.
1157  static int contexts_disposed_;
1158
1159#if defined(V8_TARGET_ARCH_X64)
1160  static const int kMaxObjectSizeInNewSpace = 1024*KB;
1161#else
1162  static const int kMaxObjectSizeInNewSpace = 512*KB;
1163#endif
1164
1165  static NewSpace new_space_;
1166  static OldSpace* old_pointer_space_;
1167  static OldSpace* old_data_space_;
1168  static OldSpace* code_space_;
1169  static MapSpace* map_space_;
1170  static CellSpace* cell_space_;
1171  static LargeObjectSpace* lo_space_;
1172  static HeapState gc_state_;
1173
1174  // Returns the size of object residing in non new spaces.
1175  static intptr_t PromotedSpaceSize();
1176
1177  // Returns the amount of external memory registered since last global gc.
1178  static int PromotedExternalMemorySize();
1179
1180  static int mc_count_;  // how many mark-compact collections happened
1181  static int ms_count_;  // how many mark-sweep collections happened
1182  static int gc_count_;  // how many gc happened
1183
1184  // Total length of the strings we failed to flatten since the last GC.
1185  static int unflattened_strings_length_;
1186
1187#define ROOT_ACCESSOR(type, name, camel_name)                                  \
1188  static inline void set_##name(type* value) {                                 \
1189    roots_[k##camel_name##RootIndex] = value;                                  \
1190  }
1191  ROOT_LIST(ROOT_ACCESSOR)
1192#undef ROOT_ACCESSOR
1193
1194#ifdef DEBUG
1195  static bool allocation_allowed_;
1196
1197  // If the --gc-interval flag is set to a positive value, this
1198  // variable holds the value indicating the number of allocations
1199  // remain until the next failure and garbage collection.
1200  static int allocation_timeout_;
1201
1202  // Do we expect to be able to handle allocation failure at this
1203  // time?
1204  static bool disallow_allocation_failure_;
1205#endif  // DEBUG
1206
1207  // Limit that triggers a global GC on the next (normally caused) GC.  This
1208  // is checked when we have already decided to do a GC to help determine
1209  // which collector to invoke.
1210  static intptr_t old_gen_promotion_limit_;
1211
1212  // Limit that triggers a global GC as soon as is reasonable.  This is
1213  // checked before expanding a paged space in the old generation and on
1214  // every allocation in large object space.
1215  static intptr_t old_gen_allocation_limit_;
1216
1217  // Limit on the amount of externally allocated memory allowed
1218  // between global GCs. If reached a global GC is forced.
1219  static intptr_t external_allocation_limit_;
1220
1221  // The amount of external memory registered through the API kept alive
1222  // by global handles
1223  static int amount_of_external_allocated_memory_;
1224
1225  // Caches the amount of external memory registered at the last global gc.
1226  static int amount_of_external_allocated_memory_at_last_global_gc_;
1227
1228  // Indicates that an allocation has failed in the old generation since the
1229  // last GC.
1230  static int old_gen_exhausted_;
1231
1232  static Object* roots_[kRootListLength];
1233
1234  static Object* global_contexts_list_;
1235
  // Row type for the static string_type_table below: a string InstanceType,
  // its instance size, and a root-list index (presumably the slot holding
  // the corresponding map -- confirm in heap.cc).
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  // Row type for the static constant_symbol_table below: a symbol's
  // C-string contents and the root-list index that caches the symbol.
  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  // Row type for the static struct_table below: a struct InstanceType, its
  // instance size, and a root-list index (presumably the slot holding the
  // corresponding map -- confirm in heap.cc).
  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };
1252
1253  static const StringTypeTable string_type_table[];
1254  static const ConstantSymbolTable constant_symbol_table[];
1255  static const StructTable struct_table[];
1256
1257  // The special hidden symbol which is an empty string, but does not match
1258  // any string when looked up in properties.
1259  static String* hidden_symbol_;
1260
1261  // GC callback function, called before and after mark-compact GC.
1262  // Allocations in the callback function are disallowed.
  // Pairs a registered prologue callback with the GCType mask it was
  // registered for.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    // Equality deliberately compares only the callback pointer, not
    // gc_type -- presumably so removal can match by function alone.
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
1273  static List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1274
  // Pairs a registered epilogue callback with the GCType mask it was
  // registered for.
  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    // Equality deliberately compares only the callback pointer, not
    // gc_type -- presumably so removal can match by function alone.
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
1285  static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1286
1287  static GCCallback global_gc_prologue_callback_;
1288  static GCCallback global_gc_epilogue_callback_;
1289
1290  // Support for computing object sizes during GC.
1291  static HeapObjectCallback gc_safe_size_of_old_object_;
1292  static int GcSafeSizeOfOldObject(HeapObject* object);
1293  static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object);
1294
1295  // Update the GC state. Called from the mark-compact collector.
1296  static void MarkMapPointersAsEncoded(bool encoded) {
1297    gc_safe_size_of_old_object_ = encoded
1298        ? &GcSafeSizeOfOldObjectWithEncodedMap
1299        : &GcSafeSizeOfOldObject;
1300  }
1301
  // Checks whether a global GC is necessary for the given allocation space.
  static GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  static bool PerformGarbageCollection(GarbageCollector collector,
                                       GCTracer* tracer);

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  MUST_USE_RESULT static inline MaybeObject* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  MUST_USE_RESULT static inline MaybeObject* AllocateRawCell();

  // Initializes a JSObject based on its map.
  static void InitializeJSObjectFromMap(JSObject* obj,
                                        FixedArray* properties,
                                        Map* map);

  // One-time setup of the root maps/objects; return false on failure.
  static bool CreateInitialMaps();
  static bool CreateInitialObjects();

  // These five Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(static void CreateCEntryStub());
  NO_INLINE(static void CreateJSEntryStub());
  NO_INLINE(static void CreateJSConstructEntryStub());
  NO_INLINE(static void CreateRegExpCEntryStub());
  NO_INLINE(static void CreateDirectCEntryStub());

  static void CreateFixedStubs();

  // Creates an oddball (undefined, null, ...) with the given string/number
  // representations.
  MUST_USE_RESULT static MaybeObject* CreateOddball(const char* to_string,
                                                    Object* to_number);

  // Allocate empty fixed array.
  MUST_USE_RESULT static MaybeObject* AllocateEmptyFixedArray();

  // Performs a minor collection in new generation.
  static void Scavenge();

  // Callback used while updating the external string table during scavenge.
  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Object** pointer);

  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                            Address new_space_front);

  // Performs a major collection in the whole heap.
  static void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  static void MarkCompactPrologue(bool is_compacting);
1358
1359  // Completely clear the Instanceof cache (to stop it keeping objects alive
1360  // around a GC).
1361  static void CompletelyClearInstanceofCache() {
1362    set_instanceof_cache_map(the_hole_value());
1363    set_instanceof_cache_function(the_hole_value());
1364  }
1365
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  // Record statistics before and after garbage collection.
  static void ReportStatisticsBeforeGC();
  static void ReportStatisticsAfterGC();
#endif

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Returns the function.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it. Specifically, a function that creates
  // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* InitializeFunction(
      JSFunction* function,
      SharedFunctionInfo* shared,
      Object* prototype);

  // Tracer for the currently running collection, if any.
  static GCTracer* tracer_;


  // Initializes the number to string cache based on the max semispace size.
  MUST_USE_RESULT static MaybeObject* InitializeNumberStringCache();
  // Flush the number to string cache.
  static void FlushNumberStringCache();

  // Recomputes the survival-rate bookkeeping below after a scavenge.
  static void UpdateSurvivalRateTrend(int start_new_space_size);

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  // Thresholds (in percent) for classifying young-generation survival.
  static const int kYoungSurvivalRateThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  static int young_survivors_after_last_gc_;
  static int high_survival_rate_period_length_;
  static double survival_rate_;
  // Last two recorded trends; FLUCTUATING is never stored, only derived
  // (see survival_rate_trend()).
  static SurvivalRateTrend previous_survival_rate_trend_;
  static SurvivalRateTrend survival_rate_trend_;
1406
  // Records a new survival-rate trend, shifting the old current value into
  // previous_. FLUCTUATING is a derived value and must never be stored.
  static void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
    ASSERT(survival_rate_trend != FLUCTUATING);
    previous_survival_rate_trend_ = survival_rate_trend_;
    survival_rate_trend_ = survival_rate_trend;
  }
1412
1413  static SurvivalRateTrend survival_rate_trend() {
1414    if (survival_rate_trend_ == STABLE) {
1415      return STABLE;
1416    } else if (previous_survival_rate_trend_ == STABLE) {
1417      return survival_rate_trend_;
1418    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
1419      return FLUCTUATING;
1420    } else {
1421      return survival_rate_trend_;
1422    }
1423  }
1424
1425  static bool IsStableOrIncreasingSurvivalTrend() {
1426    switch (survival_rate_trend()) {
1427      case STABLE:
1428      case INCREASING:
1429        return true;
1430      default:
1431        return false;
1432    }
1433  }
1434
  static bool IsIncreasingSurvivalTrend() {
    return survival_rate_trend() == INCREASING;
  }

  // True while we are inside a run of collections with a high young-
  // generation survival rate (see kYoungSurvivalRateThreshold).
  static bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }

  // Initial sizes for the symbol table and the compilation eval cache.
  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  friend class Factory;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
  friend class MarkCompactCollector;
1451};
1452
1453
// Plain record of pointers used to dump heap statistics, e.g. into a crash
// report. The trailing numbers are each field's slot index; the start/end
// markers bracket the record so a consumer can verify its integrity.
// NOTE(review): presumably filled in by Heap::RecordStats -- confirm in
// heap.cc before relying on the exact slot meanings.
class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                    //  0
  int* new_space_size;                  //  1
  int* new_space_capacity;              //  2
  intptr_t* old_pointer_space_size;     //  3
  intptr_t* old_pointer_space_capacity; //  4
  intptr_t* old_data_space_size;        //  5
  intptr_t* old_data_space_capacity;    //  6
  intptr_t* code_space_size;            //  7
  intptr_t* code_space_capacity;        //  8
  intptr_t* map_space_size;             //  9
  intptr_t* map_space_capacity;         // 10
  intptr_t* cell_space_size;            // 11
  intptr_t* cell_space_capacity;        // 12
  intptr_t* lo_space_size;              // 13
  int* global_handle_count;             // 14
  int* weak_global_handle_count;        // 15
  int* pending_global_handle_count;     // 16
  int* near_death_global_handle_count;  // 17
  int* destroyed_global_handle_count;   // 18
  intptr_t* memory_allocator_size;      // 19
  intptr_t* memory_allocator_capacity;  // 20
  int* objects_per_type;                // 21
  int* size_per_type;                   // 22
  int* os_error;                        // 23
  int* end_marker;                      // 24
};
1485
1486
// RAII scope that bumps Heap::always_allocate_scope_depth_ for its lifetime.
// NOTE(review): presumably a non-zero depth makes allocations succeed instead
// of triggering a GC retry -- confirm against Heap's use of the counter.
class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code. The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    Heap::always_allocate_scope_depth_--;
    // Matches the no-nesting assumption in the constructor.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};
1503
1504
// RAII scope that bumps Heap::linear_allocation_scope_depth_ for its
// lifetime. Unlike AlwaysAllocateScope, nesting is permitted (the destructor
// only checks the depth never goes negative).
class LinearAllocationScope {
 public:
  LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_++;
  }

  ~LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_--;
    ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
  }
};
1516
1517
1518#ifdef DEBUG
1519// Visitor class to verify interior pointers in spaces that do not contain
1520// or care about intergenerational references. All heap object pointers have to
1521// point into the heap to a location that has a map pointer at its first word.
1522// Caveat: Heap::Contains is an approximation because it can return true for
1523// objects in a heap space but above the allocation pointer.
1524class VerifyPointersVisitor: public ObjectVisitor {
1525 public:
1526  void VisitPointers(Object** start, Object** end) {
1527    for (Object** current = start; current < end; current++) {
1528      if ((*current)->IsHeapObject()) {
1529        HeapObject* object = HeapObject::cast(*current);
1530        ASSERT(Heap::Contains(object));
1531        ASSERT(object->map()->IsMap());
1532      }
1533    }
1534  }
1535};
1536
1537
1538// Visitor class to verify interior pointers in spaces that use region marks
1539// to keep track of intergenerational references.
1540// As VerifyPointersVisitor but also checks that dirty marks are set
1541// for regions covering intergenerational references.
1542class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
1543 public:
1544  void VisitPointers(Object** start, Object** end) {
1545    for (Object** current = start; current < end; current++) {
1546      if ((*current)->IsHeapObject()) {
1547        HeapObject* object = HeapObject::cast(*current);
1548        ASSERT(Heap::Contains(object));
1549        ASSERT(object->map()->IsMap());
1550        if (Heap::InNewSpace(object)) {
1551          ASSERT(Heap::InToSpace(object));
1552          Address addr = reinterpret_cast<Address>(current);
1553          ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
1554        }
1555      }
1556    }
1557  }
1558};
1559#endif
1560
1561
1562// Space iterator for iterating over all spaces of the heap.
1563// Returns each space in turn, and null when it is done.
1564class AllSpaces BASE_EMBEDDED {
1565 public:
1566  Space* next();
1567  AllSpaces() { counter_ = FIRST_SPACE; }
1568 private:
1569  int counter_;
1570};
1571
1572
1573// Space iterator for iterating over all old spaces of the heap: Old pointer
1574// space, old data space and code space.
1575// Returns each space in turn, and null when it is done.
1576class OldSpaces BASE_EMBEDDED {
1577 public:
1578  OldSpace* next();
1579  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1580 private:
1581  int counter_;
1582};
1583
1584
1585// Space iterator for iterating over all the paged spaces of the heap:
1586// Map space, old pointer space, old data space, code space and cell space.
1587// Returns each space in turn, and null when it is done.
1588class PagedSpaces BASE_EMBEDDED {
1589 public:
1590  PagedSpace* next();
1591  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1592 private:
1593  int counter_;
1594};
1595
1596
1597// Space iterator for iterating over all spaces of the heap.
1598// For each space an object iterator is provided. The deallocation of the
1599// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  // As above, but the per-space object iterators use size_func to compute
  // object sizes.
  explicit SpaceIterator(HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  // Creates (and takes ownership of) the iterator for the current space.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;  // NULL unless set by the second constructor.
};
1616
1617
1618// A HeapIterator provides iteration over the whole heap. It
1619// aggregates the specific iterators for the different spaces as
1620// these can only iterate over one space only.
1621//
1622// HeapIterator can skip free list nodes (that is, de-allocated heap
1623// objects that still remain in the heap). As implementation of free
1624// nodes filtering uses GC marks, it can't be used during MS/MC GC
1625// phases. Also, it is forbidden to interrupt iteration in this mode,
1626// as this will leave heap objects marked (and thus, unusable).
1627class HeapObjectsFilter;
1628
class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering {
    kNoFiltering,
    kFilterFreeListNodes,
    kFilterUnreachable
  };

  HeapIterator();
  explicit HeapIterator(HeapObjectsFiltering filtering);
  ~HeapIterator();

  // Returns the next object, or NULL when iteration is complete.
  HeapObject* next();
  // Restarts the iteration from the beginning.
  void reset();

 private:
  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  // Advances to the next object without applying the filter.
  HeapObject* NextObject();

  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;  // NULL when filtering_ == kNoFiltering.
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
1658
1659
1660// Cache for mapping (map, property name) into field offset.
1661// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // kLength must stay a power of two: kCapacityMask is used for masking.
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    String* name;
  };
  // Parallel arrays: field_offsets_[i] is the cached result for keys_[i].
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference exposes the array addresses to generated code.
  friend class ExternalReference;
};
1699
1700
1701// Cache for mapping (array, property name) into descriptor index.
1702// The cache contains both positive and negative results.
1703// Descriptor index equals kNotFound means the property is absent.
1704// Cleared at startup and prior to any gc.
1705class DescriptorLookupCache {
1706 public:
1707  // Lookup descriptor index for (map, name).
1708  // If absent, kAbsent is returned.
1709  static int Lookup(DescriptorArray* array, String* name) {
1710    if (!StringShape(name).IsSymbol()) return kAbsent;
1711    int index = Hash(array, name);
1712    Key& key = keys_[index];
1713    if ((key.array == array) && (key.name == name)) return results_[index];
1714    return kAbsent;
1715  }
1716
1717  // Update an element in the cache.
1718  static void Update(DescriptorArray* array, String* name, int result) {
1719    ASSERT(result != kAbsent);
1720    if (StringShape(name).IsSymbol()) {
1721      int index = Hash(array, name);
1722      Key& key = keys_[index];
1723      key.array = array;
1724      key.name = name;
1725      results_[index] = result;
1726    }
1727  }
1728
1729  // Clear the cache.
1730  static void Clear();
1731
1732  static const int kAbsent = -2;
1733 private:
1734  static int Hash(DescriptorArray* array, String* name) {
1735    // Uses only lower 32 bits if pointers are larger.
1736    uint32_t array_hash =
1737        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1738    uint32_t name_hash =
1739        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1740    return (array_hash ^ name_hash) % kLength;
1741  }
1742
1743  static const int kLength = 64;
1744  struct Key {
1745    DescriptorArray* array;
1746    String* name;
1747  };
1748
1749  static Key keys_[kLength];
1750  static int results_[kLength];
1751};
1752
1753
1754// ----------------------------------------------------------------------------
1755// Marking stack for tracing live objects.
1756
1757class MarkingStack {
1758 public:
1759  void Initialize(Address low, Address high) {
1760    top_ = low_ = reinterpret_cast<HeapObject**>(low);
1761    high_ = reinterpret_cast<HeapObject**>(high);
1762    overflowed_ = false;
1763  }
1764
1765  bool is_full() { return top_ >= high_; }
1766
1767  bool is_empty() { return top_ <= low_; }
1768
1769  bool overflowed() { return overflowed_; }
1770
1771  void clear_overflowed() { overflowed_ = false; }
1772
1773  // Push the (marked) object on the marking stack if there is room,
1774  // otherwise mark the object as overflowed and wait for a rescan of the
1775  // heap.
1776  void Push(HeapObject* object) {
1777    CHECK(object->IsHeapObject());
1778    if (is_full()) {
1779      object->SetOverflow();
1780      overflowed_ = true;
1781    } else {
1782      *(top_++) = object;
1783    }
1784  }
1785
1786  HeapObject* Pop() {
1787    ASSERT(!is_empty());
1788    HeapObject* object = *(--top_);
1789    CHECK(object->IsHeapObject());
1790    return object;
1791  }
1792
1793 private:
1794  HeapObject** low_;
1795  HeapObject** top_;
1796  HeapObject** high_;
1797  bool overflowed_;
1798};
1799
1800
1801// A helper class to document/test C++ scopes where we do not
1802// expect a GC. Usage:
1803//
1804// /* Allocation not allowed: we cannot handle a GC in this scope. */
1805// { AssertNoAllocation nogc;
1806//   ...
1807// }
1808
1809#ifdef DEBUG
1810
// RAII scope: while alive, Heap::disallow_allocation_failure_ is true.
// Saves and restores the previous value, so these scopes nest correctly.
class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = Heap::disallow_allocation_failure_;
    Heap::disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    Heap::disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;  // Value restored on scope exit.
};
1823
// Debug-mode scope asserting that no allocation happens inside it: turns
// Heap allocation off via Heap::allow_allocation(false) and restores the
// previous setting on exit (see usage comment above).
class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = Heap::allow_allocation(false);
  }

  ~AssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Previous allow-allocation setting, restored on exit.
};
1837
// Debug-mode scope that temporarily re-enables allocation inside an
// enclosing AssertNoAllocation scope, restoring the old setting on exit.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = Heap::allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Previous allow-allocation setting, restored on exit.
};
1851
1852#else  // ndef DEBUG
1853
// Release-build stand-in: the allocation check compiles away entirely.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};
1859
// Release-build stand-in: no-op, matching the debug interface.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1865
1866#endif
1867
1868// GCTracer collects and prints ONE line after each garbage collector
1869// invocation IFF --trace_gc is used.
1870
class GCTracer BASE_EMBEDDED {
 public:
  // RAII timer: accumulates the wall-clock time spent inside the scope into
  // the tracer's per-phase bucket identified by ScopeId.
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_COMPACT,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
        scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;  // Timestamp taken when the scope was entered.
  };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  // Returns maximum GC pause.
  static int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  static int get_min_in_mutator() { return min_in_mutator_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of objects in the heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  intptr_t start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of free lists
  // before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in mutator that is time elapsed between end of the
  // previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  intptr_t promoted_objects_size_;

  // Maximum GC pause.
  static int max_gc_pause_;

  // Maximum size of objects alive after GC.
  static intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  static int min_in_mutator_;

  // Size of objects alive after last GC.
  static intptr_t alive_after_last_gc_;

  // Timestamp of the end of the last GC, used to compute spent_in_mutator_.
  static double last_gc_end_timestamp_;
};
2006
2007
// Per-function cache of transcendental math results, keyed on the raw bits
// of the double input. The inline cache layout is also read by generated
// code (see the TranscendentalCacheStub friend), so the data layout here
// must stay in sync with that code.
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
  static const int kTranscendentalTypeBits = 3;
  STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument. Caches are created lazily on first use.
  MUST_USE_RESULT static inline MaybeObject* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers.  This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Looks up (or computes, allocates and caches) the result for 'input'.
  // May fail if allocating the result heap number fails; in that case the
  // cache entry is left unchanged.
  MUST_USE_RESULT inline MaybeObject* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      Counters::transcendental_cache_hit.Increment();
      return e.output;
    }
    double answer = Calculate(input);
    Counters::transcendental_cache_miss.Increment();
    Object* heap_number;
    { MaybeObject* maybe_heap_number = Heap::AllocateHeapNumber(answer);
      if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
    }
    // Only overwrite the entry once allocation has succeeded.
    elements_[hash].in[0] = c.integers[0];
    elements_[hash].in[1] = c.integers[1];
    elements_[hash].output = heap_number;
    return heap_number;
  }

  // Applies the math function selected by type_ to input.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  static const int kCacheSize = 512;
  // One cache entry: the two 32-bit halves of the input double and the
  // cached heap-number result.
  struct Element {
    uint32_t in[2];
    Object* output;
  };
  // Reinterprets a double's bits as two 32-bit integers (type punning
  // through a union).
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  // Mixes the two halves of the input's bit pattern into a cache index.
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= static_cast<int32_t>(hash) >> 16;
    hash ^= static_cast<int32_t>(hash) >> 8;
    return (hash & (kCacheSize - 1));
  }

  static Address cache_array_address() {
    // Used to create an external reference.
    return reinterpret_cast<Address>(caches_);
  }

  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;
  // Inline implementation of the cache.
  friend class TranscendentalCacheStub;

  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;
};
2106
2107
2108// External strings table is a place where all external strings are
2109// registered.  We need to keep track of such strings to properly
2110// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  // Visits all registered strings (new-space and old-space lists).
  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  // Debug check of the new-space/old-space list invariant.
  inline static void Verify();

  // Adds a string known to live in old space directly to the old list.
  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space string are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};
2140
2141
2142// Abstract base class for checking whether a weak object should be retained.
// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  // Virtual destructor: instances are deleted through base pointers.
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
2152
2153
2154} }  // namespace v8::internal
2155
2156#endif  // V8_HEAP_H_
2157