heap.h revision 8a31eba00023874d4a1dcdc5f411cc4336776874
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <math.h>

#include "spaces.h"
#include "splay-tree-inl.h"
#include "v8-counters.h"

namespace v8 {
namespace internal {


// Defines all the roots in Heap.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V) \
  /* Put the byte array map early.  We need it to be in place by the time */ \
  /* the deserializer hits the next page, since it wants to put a byte */ \
  /* array in the unused space at the end of the page. */ \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, stack_limit, StackLimit) \
  V(Object, undefined_value, UndefinedValue) \
  V(Object, the_hole_value, TheHoleValue) \
  V(Object, null_value, NullValue) \
  V(Object, true_value, TrueValue) \
  V(Object, false_value, FalseValue) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, meta_map, MetaMap) \
  V(Object, termination_exception, TerminationException) \
  V(Map, hash_table_map, HashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(Map, string_map, StringMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, ascii_symbol_map, AsciiSymbolMap) \
  V(Map, cons_symbol_map, ConsSymbolMap) \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
  V(Map, external_symbol_map, ExternalSymbolMap) \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap) \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, pixel_array_map, PixelArrayMap) \
  V(Map, external_byte_array_map, ExternalByteArrayMap) \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
  V(Map, external_short_array_map, ExternalShortArrayMap) \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
  V(Map, external_int_array_map, ExternalIntArrayMap) \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
  V(Map, external_float_array_map, ExternalFloatArrayMap) \
  V(Map, context_map, ContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, code_map, CodeMap) \
  V(Map, oddball_map, OddballMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, proxy_map, ProxyMap) \
  V(Object, nan_value, NanValue) \
  V(Object, minus_zero_value, MinusZeroValue) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(String, empty_string, EmptyString) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(Proxy, prototype_accessors, PrototypeAccessors) \
  V(NumberDictionary, code_stubs, CodeStubs) \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(Code, c_entry_code, CEntryCode) \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Object, last_script_id, LastScriptId) \
  V(Script, empty_script, EmptyScript) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \

#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#define STRONG_ROOT_LIST(V) \
  UNCONDITIONAL_STRONG_ROOT_LIST(V) \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif

#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  V(SymbolTable, symbol_table, SymbolTable)

#define SYMBOL_LIST(V) \
  V(Array_symbol, "Array") \
  V(Object_symbol, "Object") \
  V(Proto_symbol, "__proto__") \
  V(StringImpl_symbol, "StringImpl") \
  V(arguments_symbol, "arguments") \
  V(Arguments_symbol, "Arguments") \
  V(arguments_shadow_symbol, ".arguments") \
  V(call_symbol, "call") \
  V(apply_symbol, "apply") \
  V(caller_symbol, "caller") \
  V(boolean_symbol, "boolean") \
  V(Boolean_symbol, "Boolean") \
  V(callee_symbol, "callee") \
  V(constructor_symbol, "constructor") \
  V(code_symbol, ".code") \
  V(result_symbol, ".result") \
  V(catch_var_symbol, ".catch-var") \
  V(empty_symbol, "") \
  V(eval_symbol, "eval") \
  V(function_symbol, "function") \
  V(length_symbol, "length") \
  V(name_symbol, "name") \
  V(number_symbol, "number") \
  V(Number_symbol, "Number") \
  V(RegExp_symbol, "RegExp") \
  V(source_symbol, "source") \
  V(global_symbol, "global") \
  V(ignore_case_symbol, "ignoreCase") \
  V(multiline_symbol, "multiline") \
  V(input_symbol, "input") \
  V(index_symbol, "index") \
  V(last_index_symbol, "lastIndex") \
  V(object_symbol, "object") \
  V(prototype_symbol, "prototype") \
  V(string_symbol, "string") \
  V(String_symbol, "String") \
  V(Date_symbol, "Date") \
  V(this_symbol, "this") \
  V(to_string_symbol, "toString") \
  V(char_at_symbol, "CharAt") \
  V(undefined_symbol, "undefined") \
  V(value_of_symbol, "valueOf") \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
  V(illegal_access_symbol, "illegal access") \
  V(out_of_memory_symbol, "out-of-memory") \
  V(illegal_execution_state_symbol, "illegal execution state") \
  V(get_symbol, "get") \
  V(set_symbol, "set") \
  V(function_class_symbol, "Function") \
  V(illegal_argument_symbol, "illegal argument") \
  V(MakeReferenceError_symbol, "MakeReferenceError") \
  V(MakeSyntaxError_symbol, "MakeSyntaxError") \
  V(MakeTypeError_symbol, "MakeTypeError") \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
  V(illegal_return_symbol, "illegal_return") \
  V(illegal_break_symbol, "illegal_break") \
  V(illegal_continue_symbol, "illegal_continue") \
  V(unknown_label_symbol, "unknown_label") \
  V(redeclaration_symbol, "redeclaration") \
  V(failure_symbol, "<failure>") \
  V(space_symbol, " ") \
  V(exec_symbol, "exec") \
  V(zero_symbol, "0") \
  V(global_eval_symbol, "GlobalEval") \
  V(identity_hash_symbol, "v8::IdentityHash") \
  V(closure_symbol, "(closure)")
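
// For orientation, a sketch of how these X-macro lists are consumed (the
// real expansions appear in the Heap class below): a client supplies the
// V macro, e.g.
//
//   #define COUNT_SYMBOL(name, str) + 1
//   static const int kSymbolCount = 0 SYMBOL_LIST(COUNT_SYMBOL);
//   #undef COUNT_SYMBOL
//
// Here COUNT_SYMBOL is a hypothetical client macro, not part of this file;
// Heap itself instantiates SYMBOL_LIST with accessor- and index-generating
// macros further down.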


// Forward declarations.
class GCTracer;
class HeapStats;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);

typedef bool (*DirtyRegionCallback)(Address start,
                                    Address end,
                                    ObjectSlotCallback copy_object_func);


// The all-static Heap class captures the interface to the global object
// heap.  All JavaScript contexts created by this process share the same
// object heap.
class Heap : public AllStatic {
 public:
  // Configures heap size before setup.  Returns false if the heap has been
  // set up already.
  static bool ConfigureHeap(int max_semispace_size,
                            int max_old_gen_size,
                            int max_executable_size);
  static bool ConfigureHeapDefault();

  // Initializes the global object heap.  If create_heap_objects is true,
  // also creates the basic non-mutable objects.
  // Returns whether it succeeded.
  static bool Setup(bool create_heap_objects);

  // Destroys all memory allocated by the heap.
  static void TearDown();

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  static void SetStackLimits();

  // Returns whether Setup has been called.
  static bool HasBeenSetup();

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  static intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  static int MaxSemiSpaceSize() { return max_semispace_size_; }
  static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  static int InitialSemiSpaceSize() { return initial_semispace_size_; }
  static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  static intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing.  The heap grows
  // when more spaces are needed, until it reaches the limit.
  static intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  static intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  static intptr_t CommittedMemoryExecutable();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes.  Check MaxHeapObjectSize() instead.
  static intptr_t Available();

  // Returns the maximum object size in paged space.
  static inline int MaxObjectSizeInPagedSpace();

  // Returns the size of all objects residing in the heap.
  static intptr_t SizeOfObjects();
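
  // A minimal embedding sketch of the setup sequence implied by the
  // comments above (illustrative only; error handling elided):
  //
  //   if (!Heap::HasBeenSetup()) {
  //     Heap::ConfigureHeapDefault();  // or ConfigureHeap(...) with limits
  //     if (!Heap::Setup(true)) { /* abort: out of memory */ }
  //   }
  //
  // Note that ConfigureHeap must precede Setup; it fails once the heap
  // has been set up.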

  // Return the starting address and a mask for the new space.  And-masking
  // an address with the mask will result in the start address of the new
  // space for all addresses in either semispace.
  static Address NewSpaceStart() { return new_space_.start(); }
  static uintptr_t NewSpaceMask() { return new_space_.mask(); }
  static Address NewSpaceTop() { return new_space_.top(); }

  static NewSpace* new_space() { return &new_space_; }
  static OldSpace* old_pointer_space() { return old_pointer_space_; }
  static OldSpace* old_data_space() { return old_data_space_; }
  static OldSpace* code_space() { return code_space_; }
  static MapSpace* map_space() { return map_space_; }
  static CellSpace* cell_space() { return cell_space_; }
  static LargeObjectSpace* lo_space() { return lo_space_; }

  static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  static Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  static bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  static Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  static Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  // Uncommit unused semi space.
  static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

#ifdef ENABLE_HEAP_PROTECTION
  // Protect/unprotect the heap by marking all spaces read-only/writable.
  static void Protect();
  static void Unprotect();
#endif

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);

  // Allocates and initializes a new global object based on a constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateGlobalObject(
      JSFunction* constructor);

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Returns failure if allocation failed.
  MUST_USE_RESULT static MaybeObject* CopyJSObject(JSObject* source);

  // Allocates the function prototype.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateFunctionPrototype(
      JSFunction* function);

  // Reinitialize a JSGlobalProxy based on a constructor.  The object
  // must have the same size as objects allocated using the
  // constructor.  The object is reinitialized and behaves as an
  // object that has been freshly allocated using the constructor.
  MUST_USE_RESULT static MaybeObject* ReinitializeJSGlobalProxy(
      JSFunction* constructor,
      JSGlobalProxy* global);

  // Allocates and initializes a new JavaScript object based on a map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateJSObjectFromMap(
      Map* map, PretenureFlag pretenure = NOT_TENURED);
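
  // A sketch of the calling convention shared by the MaybeObject*
  // allocators above (illustrative; assumes the usual MaybeObject
  // ToObject idiom used by callers inside the VM):
  //
  //   Object* obj;
  //   { MaybeObject* maybe_obj = Heap::AllocateJSObject(constructor);
  //     // On failure this propagates Failure::RetryAfterGC upward,
  //     // where a GC is triggered and the allocation retried.
  //     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  //   }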

  // Allocates a heap object based on the map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* Allocate(Map* map,
                                               AllocationSpace space);

  // Allocates a JS Map in the heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateMap(InstanceType instance_type,
                                                  int instance_size);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT static MaybeObject* AllocatePartialMap(
      InstanceType instance_type,
      int instance_size);

  // Allocates a map for the specified function.
  MUST_USE_RESULT static MaybeObject* AllocateInitialMap(JSFunction* fun);

  // Allocates an empty code cache.
  MUST_USE_RESULT static MaybeObject* AllocateCodeCache();

  // Clears the Instanceof cache (used when a prototype changes).
  static void ClearInstanceofCache() {
    set_instanceof_cache_function(the_hole_value());
  }

  // Allocates and fully initializes a String.  There are two String
  // encodings: ASCII and two byte.  One should choose between the three
  // string allocation functions based on the encoding of the string buffer
  // used to initialize the string.
  //   - ...FromAscii initializes the string from a buffer that is ASCII
  //     encoded (it does not check that the buffer is ASCII encoded) and
  //     the result will be ASCII encoded.
  //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be ASCII encoded, otherwise it will be converted to
  //     two byte.
  //   - ...FromTwoByte initializes the string from a buffer that is
  //     two-byte encoded.  If the characters are all single-byte
  //     characters, the result will be converted to ASCII, otherwise it
  //     will be left as two-byte.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateStringFromAscii(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT static MaybeObject* AllocateStringFromUtf8(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT static MaybeObject* AllocateStringFromTwoByte(
      Vector<const uc16> str,
      PretenureFlag pretenure = NOT_TENURED);
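
  // A sketch of choosing among the three allocators above (illustrative;
  // CStrVector is the usual helper for wrapping a NUL-terminated string):
  //
  //   // Caller has verified the bytes are 7-bit ASCII:
  //   MaybeObject* a = Heap::AllocateStringFromAscii(CStrVector("abc"));
  //   // Bytes may contain multi-byte UTF-8 sequences:
  //   MaybeObject* b = Heap::AllocateStringFromUtf8(CStrVector("\xC3\xA9"));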

  // Allocates a symbol in old space based on the character stream.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* AllocateSymbol(
      Vector<const char> str,
      int chars,
      uint32_t hash_field);

  MUST_USE_RESULT static MaybeObject* AllocateInternalSymbol(
      unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);

  MUST_USE_RESULT static MaybeObject* AllocateExternalSymbol(
      Vector<const char> str,
      int chars);


  // Allocates and partially initializes a String.  There are two String
  // encodings: ASCII and two byte.  These functions allocate a string of
  // the given length and set its map and length fields.  The characters of
  // the string are uninitialized.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateRawAsciiString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT static MaybeObject* AllocateRawTwoByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Computes a single character string where the character has the given
  // code.  A cache is used for ASCII codes.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.  Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* LookupSingleCharacterStringFromCode(
      uint16_t code);

  // Allocates a byte array of the specified length.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateByteArray(
      int length,
      PretenureFlag pretenure);

  // Allocates a non-tenured byte array of the specified length.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length);

  // Allocates a pixel array of the specified length.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocatePixelArray(
      int length,
      uint8_t* external_pointer,
      PretenureFlag pretenure);

  // Allocates an external array of the specified length and type.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateExternalArray(
      int length,
      ExternalArrayType array_type,
      void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a tenured JS global property cell.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateJSGlobalPropertyCell(
      Object* value);

  // Allocates a fixed array initialized with undefined values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateFixedArray(
      int length,
      PretenureFlag pretenure);
  // Allocates a fixed array initialized with undefined values.
  MUST_USE_RESULT static MaybeObject* AllocateFixedArray(int length);

  // Allocates an uninitialized fixed array.  It must be filled by the
  // caller.
  //
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateUninitializedFixedArray(
      int length);

  // Make a copy of src and return it.  Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT static inline MaybeObject* CopyFixedArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  MUST_USE_RESULT static MaybeObject* CopyFixedArrayWithMap(FixedArray* src,
                                                            Map* map);

  // Allocates a fixed array initialized with the hole values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // AllocateHashTable is identical to AllocateFixedArray except
  // that the resulting object has hash_table_map as map.
  MUST_USE_RESULT static MaybeObject* AllocateHashTable(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate a global (but otherwise uninitialized) context.
  MUST_USE_RESULT static MaybeObject* AllocateGlobalContext();

  // Allocate a function context.
  MUST_USE_RESULT static MaybeObject* AllocateFunctionContext(
      int length,
      JSFunction* closure);

  // Allocate a 'with' context.
  MUST_USE_RESULT static MaybeObject* AllocateWithContext(
      Context* previous,
      JSObject* extension,
      bool is_catch_context);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT static MaybeObject* AllocateStruct(InstanceType type);

  // Allocates a function initialized with a shared part.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateFunction(
      Map* function_map,
      SharedFunctionInfo* shared,
      Object* prototype,
      PretenureFlag pretenure = TENURED);

  // Indices for direct access into argument objects.
  static const int kArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  static const int arguments_callee_index = 0;
  static const int arguments_length_index = 1;

  // Allocates an arguments object - optionally with an elements array.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateArgumentsObject(Object* callee,
                                                              int length);

  // Same as NewNumberFromDouble, but may return a preallocated/immutable
  // number object (e.g., minus_zero_value_, nan_value_).
  MUST_USE_RESULT static MaybeObject* NumberFromDouble(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(
      double value,
      PretenureFlag pretenure);
  // pretenure = NOT_TENURED.
  MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(double value);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* NumberFromInt32(int32_t value);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* NumberFromUint32(uint32_t value);
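
  // The expected shape of the two converters above, as a sketch (the real
  // definitions live in heap-inl.h and may differ; Smi::IsValid and
  // Smi::FromInt are the usual tagged-integer helpers):
  //
  //   MaybeObject* Heap::NumberFromInt32(int32_t value) {
  //     if (Smi::IsValid(value)) return Smi::FromInt(value);
  //     return AllocateHeapNumber(value);  // fall back to a heap-allocated box
  //   }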

  // Allocates a new proxy object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateProxy(
      Address proxy,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new SharedFunctionInfo object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateSharedFunctionInfo(Object* name);

  // Allocates a new cons string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateConsString(String* first,
                                                         String* second);

  // Allocates a new sub string object which is a substring of an underlying
  // string buffer stretching from the index start (inclusive) to the index
  // end (exclusive).
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateSubString(
      String* buffer,
      int start,
      int end,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocate a new external string object, which is backed by a string
  // resource that resides outside the V8 heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromAscii(
      ExternalAsciiString::Resource* resource);
  MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromTwoByte(
      ExternalTwoByteString::Resource* resource);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  static inline void FinalizeExternalString(String* string);

  // Allocates an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* AllocateRaw(
      int size_in_bytes,
      AllocationSpace space,
      AllocationSpace retry_space);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  static void CreateFillerObjectAt(Address addr, int size);

  // Makes a new native code object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.  On success, the pointer to the Code object is stored in the
  // self_reference.  This allows generated code to reference its own Code
  // object by containing this pointer.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* CreateCode(const CodeDesc& desc,
                                                 Code::Flags flags,
                                                 Handle<Object> self_reference);

  MUST_USE_RESULT static MaybeObject* CopyCode(Code* code);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT static MaybeObject* CopyCode(Code* code,
                                               Vector<byte> reloc_info);
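
  // A sketch of the external-string contract used by the external-string
  // allocators earlier in this section (illustrative; assumes the Resource
  // interface exposes data() and length(), as in the public v8.h string
  // resource classes):
  //
  //   class StaticAsciiResource : public ExternalAsciiString::Resource {
  //    public:
  //     explicit StaticAsciiResource(const char* s) : s_(s) {}
  //     const char* data() const { return s_; }
  //     size_t length() const { return strlen(s_); }
  //    private:
  //     const char* s_;  // storage stays outside the V8 heap
  //   };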

  // Finds the symbol for string in the symbol table.
  // If not found, a new symbol is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* LookupSymbol(Vector<const char> str);
  MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(const char* str) {
    return LookupSymbol(CStrVector(str));
  }
  MUST_USE_RESULT static MaybeObject* LookupSymbol(String* str);
  static bool LookupSymbolIfExists(String* str, String** symbol);
  static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);

  // Compute the matching symbol map for a string if possible.
  // NULL is returned if string is in new space or not flattened.
  static Map* SymbolMapForString(String* str);

  // Tries to flatten a string before compare operation.
  //
  // Returns a failure in case it was decided that flattening was
  // necessary and failed.  Note, if flattening is not necessary the
  // string might stay non-flat even when no failure is returned.
  //
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* PrepareForCompare(String* str);

  // Converts the given boolean condition to a JavaScript boolean value.
  static Object* ToBoolean(bool condition) {
    return condition ? true_value() : false_value();
  }

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  static void GarbageCollectionPrologue();
  static void GarbageCollectionEpilogue();

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  static bool CollectGarbage(AllocationSpace space,
                             GarbageCollector collector);

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline static bool CollectGarbage(AllocationSpace space);

  // Performs a full garbage collection.  Force compaction if the
  // parameter is true.
  static void CollectAllGarbage(bool force_compaction);

  // Last hope GC, should try to squeeze as much as possible.
  static void CollectAllAvailableGarbage();

  // Notify the heap that a context has been disposed.
  static int NotifyContextDisposed() { return ++contexts_disposed_; }

  // Utility to invoke the scavenger.  This is needed in test code to
  // ensure correct callback for weak global handles.
  static void PerformScavenge();
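
  // How the collection entry points above are typically combined, as a
  // sketch (illustrative only):
  //
  //   Heap::CollectGarbage(NEW_SPACE);       // minor GC (scavenge)
  //   Heap::CollectAllGarbage(false);        // full GC, no forced compaction
  //   Heap::CollectAllAvailableGarbage();    // "last hope" full collections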

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  static void GarbageCollectionGreedyCheck();
#endif

  static void AddGCPrologueCallback(
      GCPrologueCallback callback, GCType gc_type_filter);
  static void RemoveGCPrologueCallback(GCPrologueCallback callback);

  static void AddGCEpilogueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);

  static void SetGlobalGCPrologueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;
  }
  static void SetGlobalGCEpilogueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
  }

  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  static inline type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

// Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  static inline Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
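
  // For a single ROOT_LIST entry such as
  //   V(Object, undefined_value, UndefinedValue)
  // the ROOT_ACCESSOR expansion above yields, in sketch form:
  //
  //   static inline Object* undefined_value() {
  //     return Object::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   static inline Object* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Object*>(roots_[kUndefinedValueRootIndex]);
  //   }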

  // The hidden_symbol is special because it is the empty string, but does
  // not match the empty string.
  static String* hidden_symbol() { return hidden_symbol_; }

  static void set_global_contexts_list(Object* object) {
    global_contexts_list_ = object;
  }
  static Object* global_contexts_list() { return global_contexts_list_; }

  // Iterates over all roots in the heap.
  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other roots in the heap.
  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  enum ExpectedPageWatermarkState {
    WATERMARK_SHOULD_BE_VALID,
    WATERMARK_CAN_BE_INVALID
  };

  // For each dirty region on a page in use from an old space call
  // visit_dirty_region callback.
  // If either visit_dirty_region or callback can cause an allocation
  // in old space and changes in allocation watermark then
  // can_preallocate_during_iteration should be set to true.
  // All pages will be marked as having an invalid watermark upon
  // iteration completion.
  static void IterateDirtyRegions(
      PagedSpace* space,
      DirtyRegionCallback visit_dirty_region,
      ObjectSlotCallback callback,
      ExpectedPageWatermarkState expected_page_watermark_state);

  // Interpret marks as a bitvector of dirty marks for regions of size
  // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
  // memory interval from start to top.  For each dirty region call a
  // visit_dirty_region callback.  Return updated bitvector of dirty marks.
  static uint32_t IterateDirtyRegions(uint32_t marks,
                                      Address start,
                                      Address end,
                                      DirtyRegionCallback visit_dirty_region,
                                      ObjectSlotCallback callback);

  // Iterate over pointers to the from-semispace of new space found in the
  // memory interval from start to end.
  // Update dirty marks for the page containing the start address.
  static void IterateAndMarkPointersToFromSpace(Address start,
                                                Address end,
                                                ObjectSlotCallback callback);

  // Iterate over pointers to new space found in the memory interval from
  // start to end.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyRegion(Address start,
                                           Address end,
                                           ObjectSlotCallback callback);


  // Iterate over pointers to new space found in the memory interval from
  // start to end.  This interval is considered to belong to the map space.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyMapsRegion(Address start,
                                               Address end,
                                               ObjectSlotCallback callback);


  // Returns whether the object resides in new space.
  static inline bool InNewSpace(Object* object);
  static inline bool InFromSpace(Object* object);
  static inline bool InToSpace(Object* object);

  // Checks whether an address/object is in the heap (including the
  // auxiliary area and unused area).
  static bool Contains(Address addr);
  static bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  static bool InSpace(Address addr, AllocationSpace space);
  static bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its
  // type.
  static inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  static void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs.  Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  static HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the
  // dictionary).
  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  static void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  // Update the next script id.
  static inline void SetLastScriptId(Object* last_script_id);

  // Generated code can embed this address to get access to the roots.
  static Object** roots_address() { return roots_; }

  // Get address of global contexts list for serialization support.
  static Object** global_contexts_list_address() {
    return &global_contexts_list_;
  }

#ifdef DEBUG
  static void Print();
  static void PrintHandles();

  // Verify the heap is in its normal state before or after a GC.
  static void Verify();

  // Report heap statistics.
  static void ReportHeapStatistics(const char* title);
  static void ReportCodeStatistics(const char* title);

  // Fill in bogus values in from space.
  static void ZapFromSpace();
#endif

#if defined(ENABLE_LOGGING_AND_PROFILING)
  // Print short heap statistics.
  static void PrintShortHeapStatistics();
#endif

  // Makes a new symbol object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT static MaybeObject* CreateSymbol(const char* str,
                                                   int length,
                                                   int hash);
  MUST_USE_RESULT static MaybeObject* CreateSymbol(String* str);

  // Write barrier support for address[offset] = o.
  static inline void RecordWrite(Address address, int offset);

  // Write barrier support for address[start : start + len[ = o.
  static inline void RecordWrites(Address address, int start, int len);

  // Given an address occupied by a live code object, return that object.
  static Object* FindCodeObject(Address a);

  // Invoke Shrink on shrinkable spaces.
  static void Shrink();

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  static inline HeapState gc_state() { return gc_state_; }

#ifdef DEBUG
  static bool IsAllocationAllowed() { return allocation_allowed_; }
  static inline bool allow_allocation(bool enable);

  static bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  static void TracePathToObject(Object* target);
  static void TracePathToGlobal();
#endif

  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary; the object might be promoted to an old space.  The caller
  // must ensure the precondition that the object is (a) a heap object and
  // (b) in the heap's from space.
  static void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Commits from space if it is uncommitted.
  static void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using
  // freelists or causing a GC.  It returns true if space was reserved or
  // false if a GC is needed.  For paged spaces the space requested must
  // include the space wasted at the end of each page when allocating
  // linearly.
  static void ReserveSpace(
      int new_space_size,
      int pointer_space_size,
      int data_space_size,
      int code_space_size,
      int map_space_size,
      int cell_space_size,
      int large_object_size);

  //
  // Support for the API.
  //

  static bool CreateApiObjects();

  // Attempt to find the number in a small cache.  If we find it, return
  // the string representation of the number.  Otherwise return undefined.
  static Object* GetNumberStringCache(Object* number);

  // Update the cache with a new number-string pair.
  static void SetNumberStringCache(Object* number, String* str);

  // Adjusts the amount of registered external memory.
  // Returns the adjusted value.
  static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
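
  // A sketch of the intended use of the accounting hook above: an embedder
  // wrapping an external buffer reports its size so the GC feels the
  // pressure, and un-reports it when the wrapper dies (illustrative;
  // kBufferSize is a placeholder):
  //
  //   Heap::AdjustAmountOfExternalAllocatedMemory(kBufferSize);   // acquire
  //   ...
  //   Heap::AdjustAmountOfExternalAllocatedMemory(-kBufferSize);  // release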

  // Allocate uninitialized fixed array.
  MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(int length);
  MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(
      int length,
      PretenureFlag pretenure);

  // True if we have reached the allocation limit in the old generation that
  // should force the next GC (caused normally) to be a full one.
  static bool OldGenerationPromotionLimitReached() {
    return (PromotedSpaceSize() + PromotedExternalMemorySize())
           > old_gen_promotion_limit_;
  }

  static intptr_t OldGenerationSpaceAvailable() {
    return old_gen_allocation_limit_ -
           (PromotedSpaceSize() + PromotedExternalMemorySize());
  }

  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now.
  static bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }

  // Can be called when the embedding application is idle.
  static bool IdleNotification();

  // Declare all the root indices.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

// Utility type maps.
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
    SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

    kSymbolTableRootIndex,
    kStrongRootListLength = kSymbolTableRootIndex,
    kRootListLength
  };

  MUST_USE_RESULT static MaybeObject* NumberToString(
      Object* number,
      bool check_number_string_cache = true);

  static Map* MapForExternalArrayType(ExternalArrayType array_type);
  static RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  static void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy a block of memory from src to dst.  The size of the block should
  // be aligned by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                             Address src,
                                                             int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                             Address src,
                                                             int byte_size);

  // Check new space expansion criteria and expand semispaces if they were
  // hit.
  static void CheckNewSpaceExpansionCriteria();

  static inline void IncrementYoungSurvivorsCounter(int survived) {
    young_survivors_after_last_gc_ = survived;
    survived_since_last_expansion_ += survived;
  }

  static void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  static void ProcessWeakReferences(WeakObjectRetainer* retainer);
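
  // The alignment requirement on CopyBlock/MoveBlock above allows a simple
  // word-by-word loop; a sketch of the idea (the real versions live in
  // heap-inl.h and may differ):
  //
  //   void CopyBlock(Address dst, Address src, int byte_size) {
  //     ASSERT(IsAligned(byte_size, kPointerSize));
  //     Object** d = reinterpret_cast<Object**>(dst);
  //     Object** s = reinterpret_cast<Object**>(src);
  //     for (int i = 0; i < byte_size / kPointerSize; i++) d[i] = s[i];
  //   }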

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  static inline bool ShouldBePromoted(Address old_address, int object_size);

  static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }

  static void ClearJSFunctionResultCaches();

  static void ClearNormalizedMapCaches();

  static GCTracer* tracer() { return tracer_; }

 private:
  static int reserved_semispace_size_;
  static int max_semispace_size_;
  static int initial_semispace_size_;
  static intptr_t max_old_generation_size_;
  static intptr_t max_executable_size_;
  static intptr_t code_range_size_;

  // For keeping track of how much data has survived
  // scavenge since the last new space expansion.
  static int survived_since_last_expansion_;

  static int always_allocate_scope_depth_;
  static int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  static int contexts_disposed_;

#if defined(V8_TARGET_ARCH_X64)
  static const int kMaxObjectSizeInNewSpace = 512*KB;
#else
  static const int kMaxObjectSizeInNewSpace = 256*KB;
#endif

  static NewSpace new_space_;
  static OldSpace* old_pointer_space_;
  static OldSpace* old_data_space_;
  static OldSpace* code_space_;
  static MapSpace* map_space_;
  static CellSpace* cell_space_;
  static LargeObjectSpace* lo_space_;
  static HeapState gc_state_;

  // Returns the size of the objects residing in non-new spaces.
  static intptr_t PromotedSpaceSize();

  // Returns the amount of external memory registered since the last global
  // gc.
  static int PromotedExternalMemorySize();

  static int mc_count_;  // how many mark-compact collections happened
  static int ms_count_;  // how many mark-sweep collections happened
  static int gc_count_;  // how many GCs happened

  // Total length of the strings we failed to flatten since the last GC.
  static int unflattened_strings_length_;

#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline void set_##name(type* value) { \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#ifdef DEBUG
  static bool allocation_allowed_;

  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  static int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  static bool disallow_allocation_failure_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke.
  static intptr_t old_gen_promotion_limit_;

  // Limit that triggers a global GC as soon as is reasonable.  This is
  // checked before expanding a paged space in the old generation and on
  // every allocation in large object space.
  static intptr_t old_gen_allocation_limit_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs.  If reached a global GC is forced.
  static intptr_t external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles.
  static int amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  static int amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  static int old_gen_exhausted_;

  static Object* roots_[kRootListLength];

  static Object* global_contexts_list_;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];

  // The special hidden symbol which is an empty string, but does not match
  // any string when looked up in properties.
  static String* hidden_symbol_;

  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
  static List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
  static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  static GCCallback global_gc_prologue_callback_;
  static GCCallback global_gc_epilogue_callback_;

  // Support for computing object sizes during GC.
  static HeapObjectCallback gc_safe_size_of_old_object_;
  static int GcSafeSizeOfOldObject(HeapObject* object);
  static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object);

  // Update the GC state.  Called from the mark-compact collector.
  static void MarkMapPointersAsEncoded(bool encoded) {
    gc_safe_size_of_old_object_ = encoded
        ? &GcSafeSizeOfOldObjectWithEncodedMap
        : &GcSafeSizeOfOldObject;
  }

  // Checks whether a global GC is necessary.
  static GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  static bool PerformGarbageCollection(GarbageCollector collector,
                                       GCTracer* tracer);

  // Allocate an uninitialized object in map space.  The behavior is
  // identical to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that
  // (a) it doesn't have to test the allocation space argument and (b) can
  // reduce code size (since both AllocateRaw and AllocateRawMap are
  // inlined).
  MUST_USE_RESULT static inline MaybeObject* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  MUST_USE_RESULT static inline MaybeObject* AllocateRawCell();

  // Initializes a JSObject based on its map.
  static void InitializeJSObjectFromMap(JSObject* obj,
                                        FixedArray* properties,
                                        Map* map);

  static bool CreateInitialMaps();
  static bool CreateInitialObjects();

  // These four Create*EntryStub functions are here and forced to not be
  // inlined because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(static void CreateCEntryStub());
  NO_INLINE(static void CreateJSEntryStub());
  NO_INLINE(static void CreateJSConstructEntryStub());
  NO_INLINE(static void CreateRegExpCEntryStub());

  static void CreateFixedStubs();

  MUST_USE_RESULT static MaybeObject* CreateOddball(const char* to_string,
                                                    Object* to_number);

  // Allocate empty fixed array.
  MUST_USE_RESULT static MaybeObject* AllocateEmptyFixedArray();

  // Performs a minor collection in new generation.
  static void Scavenge();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Object** pointer);

  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                            Address new_space_front);

  // Performs a major collection in the whole heap.
  static void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  static void MarkCompactPrologue(bool is_compacting);

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  static void CompletelyClearInstanceofCache() {
    set_instanceof_cache_map(the_hole_value());
    set_instanceof_cache_function(the_hole_value());
  }

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  // Record statistics before and after garbage collection.
  static void ReportStatisticsBeforeGC();
  static void ReportStatisticsAfterGC();
#endif

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Returns the function.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it.  Specifically, a function that
  // creates instances of type JS_FUNCTION_TYPE can benefit from the use of
  // this function.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT static inline MaybeObject* InitializeFunction(
      JSFunction* function,
      SharedFunctionInfo* shared,
      Object* prototype);

  static GCTracer* tracer_;


  // Initializes the number to string cache based on the max semispace size.
  MUST_USE_RESULT static MaybeObject* InitializeNumberStringCache();
  // Flush the number to string cache.
  static void FlushNumberStringCache();

  static void UpdateSurvivalRateTrend(int start_new_space_size);

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  static const int kYoungSurvivalRateThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  static int young_survivors_after_last_gc_;
  static int high_survival_rate_period_length_;
  static double survival_rate_;
  static SurvivalRateTrend previous_survival_rate_trend_;
  static SurvivalRateTrend survival_rate_trend_;

  static void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
    ASSERT(survival_rate_trend != FLUCTUATING);
    previous_survival_rate_trend_ = survival_rate_trend_;
    survival_rate_trend_ = survival_rate_trend;
  }

  static SurvivalRateTrend survival_rate_trend() {
    if (survival_rate_trend_ == STABLE) {
      return STABLE;
    } else if (previous_survival_rate_trend_ == STABLE) {
      return survival_rate_trend_;
    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
      return FLUCTUATING;
    } else {
      return survival_rate_trend_;
    }
  }

  static bool IsStableOrIncreasingSurvivalTrend() {
    switch (survival_rate_trend()) {
      case STABLE:
      case INCREASING:
        return true;
      default:
        return false;
    }
  }

  static bool IsIncreasingSurvivalTrend() {
    return survival_rate_trend() == INCREASING;
  }

  static bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  friend class Factory;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
  friend class MarkCompactCollector;
};


class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                    //  0
  int* new_space_size;                  //  1
  int* new_space_capacity;              //  2
  intptr_t* old_pointer_space_size;     //  3
  intptr_t* old_pointer_space_capacity; //  4
  intptr_t* old_data_space_size;        //  5
  intptr_t* old_data_space_capacity;    //  6
  intptr_t* code_space_size;            //  7
  intptr_t* code_space_capacity;        //  8
  intptr_t* map_space_size;             //  9
  intptr_t* map_space_capacity;         // 10
  intptr_t* cell_space_size;            // 11
  intptr_t* cell_space_capacity;        // 12
  intptr_t* lo_space_size;              // 13
  int* global_handle_count;             // 14
  int* weak_global_handle_count;        // 15
  int* pending_global_handle_count;     // 16
  int* near_death_global_handle_count;  // 17
  int* destroyed_global_handle_count;   // 18
  intptr_t* memory_allocator_size;      // 19
  intptr_t* memory_allocator_capacity;  // 20
  int* objects_per_type;                // 21
  int* size_per_type;                   // 22
  int* os_error;                        // 23
  int* end_marker;                      // 24
};
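
// A sketch of how HeapStats is meant to be filled in (illustrative; each
// slot points at caller-owned storage, and Heap::RecordStats, declared
// above, writes through the pointers):
//
//   int start, end, new_space_size;  // ... one variable per slot
//   HeapStats stats;
//   stats.start_marker = &start;
//   stats.new_space_size = &new_space_size;
//   // ... wire up the remaining slots ...
//   stats.end_marker = &end;
//   Heap::RecordStats(&stats);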

class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code. The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    Heap::always_allocate_scope_depth_--;
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};


class LinearAllocationScope {
 public:
  LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_++;
  }

  ~LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_--;
    ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
  }
};


#ifdef DEBUG
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have
// to point into the heap to a location that has a map pointer at its first
// word. Caveat: Heap::Contains is an approximation because it can return
// true for objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        ASSERT(Heap::Contains(object));
        ASSERT(object->map()->IsMap());
      }
    }
  }
};


// Visitor class to verify interior pointers in spaces that use region marks
// to keep track of intergenerational references.
// As VerifyPointersVisitor but also checks that dirty marks are set
// for regions covering intergenerational references.
class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        ASSERT(Heap::Contains(object));
        ASSERT(object->map()->IsMap());
        if (Heap::InNewSpace(object)) {
          ASSERT(Heap::InToSpace(object));
          Address addr = reinterpret_cast<Address>(current);
          ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
        }
      }
    }
  }
};
#endif


// Space iterator for iterating over all spaces of the heap.
// Returns each space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  Space* next();
  AllSpaces() { counter_ = FIRST_SPACE; }
 private:
  int counter_;
};


// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.
// Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  OldSpace* next();
  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap:
// Map space, old pointer space, old data space, code space and cell space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  PagedSpace* next();
  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  int counter_;
};
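
// Illustrative use of the space iterators above; the same loop shape works
// for AllSpaces, OldSpaces and PagedSpaces:
//
//   AllSpaces spaces;
//   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
//     // ... inspect *space ...
//   }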

// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  explicit SpaceIterator(HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};


// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces, as
// each of these can only iterate over a single space.
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As the implementation of free
// list node filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class FreeListNodesFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum FreeListNodesFiltering {
    kNoFiltering,
    kPreciseFiltering
  };

  HeapIterator();
  explicit HeapIterator(FreeListNodesFiltering filtering);
  ~HeapIterator();

  HeapObject* next();
  void reset();

 private:
  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  HeapObject* NextObject();

  FreeListNodesFiltering filtering_;
  FreeListNodesFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
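
// Illustrative use of HeapIterator: visiting every object in the heap. In
// the default kNoFiltering mode the loop may also encounter free list
// nodes (a sketch, assuming next() returns NULL when done):
//
//   HeapIterator iterator;
//   for (HeapObject* obj = iterator.next();
//        obj != NULL;
//        obj = iterator.next()) {
//     // ... examine obj; with kPreciseFiltering the iteration must be
//     // run to completion (see the comment above) ...
//   }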

// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays. Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  friend class ExternalReference;
};


// Cache for mapping (array, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index equal to kNotFound means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (array, name).
  // If absent, kAbsent is returned.
  static int Lookup(DescriptorArray* array, String* name) {
    if (!StringShape(name).IsSymbol()) return kAbsent;
    int index = Hash(array, name);
    Key& key = keys_[index];
    if ((key.array == array) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  static void Update(DescriptorArray* array, String* name, int result) {
    ASSERT(result != kAbsent);
    if (StringShape(name).IsSymbol()) {
      int index = Hash(array, name);
      Key& key = keys_[index];
      key.array = array;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  static void Clear();

  static const int kAbsent = -2;

 private:
  static int Hash(DescriptorArray* array, String* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t array_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
    return (array_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    DescriptorArray* array;
    String* name;
  };

  static Key keys_[kLength];
  static int results_[kLength];
};


// ----------------------------------------------------------------------------
// Marking stack for tracing live objects.

class MarkingStack {
 public:
  void Initialize(Address low, Address high) {
    top_ = low_ = reinterpret_cast<HeapObject**>(low);
    high_ = reinterpret_cast<HeapObject**>(high);
    overflowed_ = false;
  }

  bool is_full() { return top_ >= high_; }

  bool is_empty() { return top_ <= low_; }

  bool overflowed() { return overflowed_; }

  void clear_overflowed() { overflowed_ = false; }

  // Push the (marked) object on the marking stack if there is room,
  // otherwise mark the object as overflowed and wait for a rescan of the
  // heap.
  void Push(HeapObject* object) {
    CHECK(object->IsHeapObject());
    if (is_full()) {
      object->SetOverflow();
      overflowed_ = true;
    } else {
      *(top_++) = object;
    }
  }

  HeapObject* Pop() {
    ASSERT(!is_empty());
    HeapObject* object = *(--top_);
    CHECK(object->IsHeapObject());
    return object;
  }

 private:
  HeapObject** low_;
  HeapObject** top_;
  HeapObject** high_;
  bool overflowed_;
};
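
// A sketch of the overflow protocol implied by Push() above: the marker
// drains the stack, and if any Push() overflowed, the heap must be
// rescanned for objects that carry the overflow bit:
//
//   marking_stack.Initialize(low, high);
//   marking_stack.Push(root);
//   while (!marking_stack.is_empty()) {
//     HeapObject* object = marking_stack.Pop();
//     // ... mark object's children, pushing the newly marked ones ...
//   }
//   if (marking_stack.overflowed()) {
//     marking_stack.clear_overflowed();
//     // ... rescan the heap for objects with the overflow bit set ...
//   }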

// A helper class to document/test C++ scopes where we do not
// expect a GC. Usage:
//
// /* Allocation not allowed: we cannot handle a GC in this scope. */
// { AssertNoAllocation nogc;
//   ...
// }

#ifdef DEBUG

class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = Heap::disallow_allocation_failure_;
    Heap::disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    Heap::disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;
};

class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = Heap::allow_allocation(false);
  }

  ~AssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = Heap::allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;
};

#else  // ndef DEBUG

class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};

#endif
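
// These scopes nest: DisableAssertNoAllocation temporarily re-enables
// allocation inside a region guarded by AssertNoAllocation, and each
// destructor restores the previous state. For example:
//
//   { AssertNoAllocation no_gc;
//     // ... code that must not allocate ...
//     { DisableAssertNoAllocation allow_gc;
//       // ... a known-safe allocation ...
//     }
//     // ... allocation is forbidden again here ...
//   }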

// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_COMPACT,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT((0 <= scope_) && (scope_ < kNumberOfScopes));
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
  };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  // Returns maximum GC pause.
  static int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  static int get_min_in_mutator() { return min_in_mutator_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of objects in the heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  intptr_t start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects. Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared. Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC. Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of the free
  // lists before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in the mutator, i.e. time elapsed between the end
  // of the previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  intptr_t promoted_objects_size_;

  // Maximum GC pause.
  static int max_gc_pause_;

  // Maximum size of objects alive after GC.
  static intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  static int min_in_mutator_;

  // Size of objects alive after last GC.
  static intptr_t alive_after_last_gc_;

  static double last_gc_end_timestamp_;
};
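
// Illustrative use of GCTracer::Scope: a GC phase is timed with an RAII
// scope that adds the elapsed milliseconds to the tracer's per-phase
// totals (the enclosing function here is hypothetical):
//
//   void MarkPhase(GCTracer* tracer) {
//     GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_MARK);
//     // ... marking work; its duration accumulates in scopes_[MC_MARK] ...
//   }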

class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  MUST_USE_RESULT static inline MaybeObject* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers. This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  MUST_USE_RESULT inline MaybeObject* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      Counters::transcendental_cache_hit.Increment();
      return e.output;
    }
    double answer = Calculate(input);
    Counters::transcendental_cache_miss.Increment();
    Object* heap_number;
    { MaybeObject* maybe_heap_number = Heap::AllocateHeapNumber(answer);
      if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
    }
    elements_[hash].in[0] = c.integers[0];
    elements_[hash].in[1] = c.integers[1];
    elements_[hash].output = heap_number;
    return heap_number;
  }

  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }

  static const int kCacheSize = 512;
  struct Element {
    uint32_t in[2];
    Object* output;
  };
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= static_cast<int32_t>(hash) >> 16;
    hash ^= static_cast<int32_t>(hash) >> 8;
    return (hash & (kCacheSize - 1));
  }

  static Address cache_array_address() {
    // Used to create an external reference.
    return reinterpret_cast<Address>(caches_);
  }

  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;
  // Inline implementation of the caching.
  friend class TranscendentalCacheStub;

  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;
};
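
// Illustrative use of the transcendental cache, unwrapping the MaybeObject
// result in the same way as elsewhere in this file:
//
//   Object* result;
//   { MaybeObject* maybe =
//         TranscendentalCache::Get(TranscendentalCache::SIN, input);
//     if (!maybe->ToObject(&result)) return maybe;
//   }
//   // 'result' is now a heap number holding sin(input).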

// The external strings table is where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  inline static void Verify();

  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections, new space strings are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned, as in some GC situations the object may have been
  // moved.
  virtual Object* RetainAs(Object* object) = 0;
};


} }  // namespace v8::internal

#endif  // V8_HEAP_H_