// heap.h revision 086aeeaae12517475c22695a200be45495516549
1// Copyright 2010 the V8 project authors. All rights reserved. 2// Redistribution and use in source and binary forms, with or without 3// modification, are permitted provided that the following conditions are 4// met: 5// 6// * Redistributions of source code must retain the above copyright 7// notice, this list of conditions and the following disclaimer. 8// * Redistributions in binary form must reproduce the above 9// copyright notice, this list of conditions and the following 10// disclaimer in the documentation and/or other materials provided 11// with the distribution. 12// * Neither the name of Google Inc. nor the names of its 13// contributors may be used to endorse or promote products derived 14// from this software without specific prior written permission. 15// 16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28#ifndef V8_HEAP_H_ 29#define V8_HEAP_H_ 30 31#include <math.h> 32 33#include "spaces.h" 34#include "splay-tree-inl.h" 35#include "v8-counters.h" 36 37namespace v8 { 38namespace internal { 39 40 41// Defines all the roots in Heap. 42#define UNCONDITIONAL_STRONG_ROOT_LIST(V) \ 43 /* Put the byte array map early. 
We need it to be in place by the time */ \ 44 /* the deserializer hits the next page, since it wants to put a byte */ \ 45 /* array in the unused space at the end of the page. */ \ 46 V(Map, byte_array_map, ByteArrayMap) \ 47 V(Map, one_pointer_filler_map, OnePointerFillerMap) \ 48 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \ 49 /* Cluster the most popular ones in a few cache lines here at the top. */ \ 50 V(Smi, stack_limit, StackLimit) \ 51 V(Object, undefined_value, UndefinedValue) \ 52 V(Object, the_hole_value, TheHoleValue) \ 53 V(Object, null_value, NullValue) \ 54 V(Object, true_value, TrueValue) \ 55 V(Object, false_value, FalseValue) \ 56 V(Object, arguments_marker, ArgumentsMarker) \ 57 V(Map, heap_number_map, HeapNumberMap) \ 58 V(Map, global_context_map, GlobalContextMap) \ 59 V(Map, fixed_array_map, FixedArrayMap) \ 60 V(Map, fixed_cow_array_map, FixedCOWArrayMap) \ 61 V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \ 62 V(Map, meta_map, MetaMap) \ 63 V(Object, termination_exception, TerminationException) \ 64 V(Map, hash_table_map, HashTableMap) \ 65 V(FixedArray, empty_fixed_array, EmptyFixedArray) \ 66 V(ByteArray, empty_byte_array, EmptyByteArray) \ 67 V(Map, string_map, StringMap) \ 68 V(Map, ascii_string_map, AsciiStringMap) \ 69 V(Map, symbol_map, SymbolMap) \ 70 V(Map, ascii_symbol_map, AsciiSymbolMap) \ 71 V(Map, cons_symbol_map, ConsSymbolMap) \ 72 V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \ 73 V(Map, external_symbol_map, ExternalSymbolMap) \ 74 V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap) \ 75 V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \ 76 V(Map, cons_string_map, ConsStringMap) \ 77 V(Map, cons_ascii_string_map, ConsAsciiStringMap) \ 78 V(Map, external_string_map, ExternalStringMap) \ 79 V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \ 80 V(Map, external_ascii_string_map, ExternalAsciiStringMap) \ 81 V(Map, 
undetectable_string_map, UndetectableStringMap) \ 82 V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \ 83 V(Map, pixel_array_map, PixelArrayMap) \ 84 V(Map, external_byte_array_map, ExternalByteArrayMap) \ 85 V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \ 86 V(Map, external_short_array_map, ExternalShortArrayMap) \ 87 V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \ 88 V(Map, external_int_array_map, ExternalIntArrayMap) \ 89 V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \ 90 V(Map, external_float_array_map, ExternalFloatArrayMap) \ 91 V(Map, context_map, ContextMap) \ 92 V(Map, catch_context_map, CatchContextMap) \ 93 V(Map, code_map, CodeMap) \ 94 V(Map, oddball_map, OddballMap) \ 95 V(Map, global_property_cell_map, GlobalPropertyCellMap) \ 96 V(Map, shared_function_info_map, SharedFunctionInfoMap) \ 97 V(Map, proxy_map, ProxyMap) \ 98 V(Object, nan_value, NanValue) \ 99 V(Object, minus_zero_value, MinusZeroValue) \ 100 V(Object, instanceof_cache_function, InstanceofCacheFunction) \ 101 V(Object, instanceof_cache_map, InstanceofCacheMap) \ 102 V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \ 103 V(String, empty_string, EmptyString) \ 104 V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ 105 V(Map, neander_map, NeanderMap) \ 106 V(JSObject, message_listeners, MessageListeners) \ 107 V(Proxy, prototype_accessors, PrototypeAccessors) \ 108 V(NumberDictionary, code_stubs, CodeStubs) \ 109 V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \ 110 V(Code, js_entry_code, JsEntryCode) \ 111 V(Code, js_construct_entry_code, JsConstructEntryCode) \ 112 V(Code, c_entry_code, CEntryCode) \ 113 V(FixedArray, number_string_cache, NumberStringCache) \ 114 V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \ 115 V(FixedArray, natives_source_cache, NativesSourceCache) \ 116 V(Object, last_script_id, LastScriptId) \ 117 V(Script, 
empty_script, EmptyScript) \ 118 V(Smi, real_stack_limit, RealStackLimit) \ 119 V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \ 120 121#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP 122#define STRONG_ROOT_LIST(V) \ 123 UNCONDITIONAL_STRONG_ROOT_LIST(V) \ 124 V(Code, re_c_entry_code, RegExpCEntryCode) 125#else 126#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V) 127#endif 128 129#define ROOT_LIST(V) \ 130 STRONG_ROOT_LIST(V) \ 131 V(SymbolTable, symbol_table, SymbolTable) 132 133#define SYMBOL_LIST(V) \ 134 V(Array_symbol, "Array") \ 135 V(Object_symbol, "Object") \ 136 V(Proto_symbol, "__proto__") \ 137 V(StringImpl_symbol, "StringImpl") \ 138 V(arguments_symbol, "arguments") \ 139 V(Arguments_symbol, "Arguments") \ 140 V(arguments_shadow_symbol, ".arguments") \ 141 V(call_symbol, "call") \ 142 V(apply_symbol, "apply") \ 143 V(caller_symbol, "caller") \ 144 V(boolean_symbol, "boolean") \ 145 V(Boolean_symbol, "Boolean") \ 146 V(callee_symbol, "callee") \ 147 V(constructor_symbol, "constructor") \ 148 V(code_symbol, ".code") \ 149 V(result_symbol, ".result") \ 150 V(catch_var_symbol, ".catch-var") \ 151 V(empty_symbol, "") \ 152 V(eval_symbol, "eval") \ 153 V(function_symbol, "function") \ 154 V(length_symbol, "length") \ 155 V(name_symbol, "name") \ 156 V(number_symbol, "number") \ 157 V(Number_symbol, "Number") \ 158 V(RegExp_symbol, "RegExp") \ 159 V(source_symbol, "source") \ 160 V(global_symbol, "global") \ 161 V(ignore_case_symbol, "ignoreCase") \ 162 V(multiline_symbol, "multiline") \ 163 V(input_symbol, "input") \ 164 V(index_symbol, "index") \ 165 V(last_index_symbol, "lastIndex") \ 166 V(object_symbol, "object") \ 167 V(prototype_symbol, "prototype") \ 168 V(string_symbol, "string") \ 169 V(String_symbol, "String") \ 170 V(Date_symbol, "Date") \ 171 V(this_symbol, "this") \ 172 V(to_string_symbol, "toString") \ 173 V(char_at_symbol, "CharAt") \ 174 V(undefined_symbol, "undefined") \ 175 V(value_of_symbol, "valueOf") \ 
176 V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \ 177 V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \ 178 V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized") \ 179 V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized") \ 180 V(stack_overflow_symbol, "kStackOverflowBoilerplate") \ 181 V(illegal_access_symbol, "illegal access") \ 182 V(out_of_memory_symbol, "out-of-memory") \ 183 V(illegal_execution_state_symbol, "illegal execution state") \ 184 V(get_symbol, "get") \ 185 V(set_symbol, "set") \ 186 V(function_class_symbol, "Function") \ 187 V(illegal_argument_symbol, "illegal argument") \ 188 V(MakeReferenceError_symbol, "MakeReferenceError") \ 189 V(MakeSyntaxError_symbol, "MakeSyntaxError") \ 190 V(MakeTypeError_symbol, "MakeTypeError") \ 191 V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \ 192 V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \ 193 V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \ 194 V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \ 195 V(illegal_return_symbol, "illegal_return") \ 196 V(illegal_break_symbol, "illegal_break") \ 197 V(illegal_continue_symbol, "illegal_continue") \ 198 V(unknown_label_symbol, "unknown_label") \ 199 V(redeclaration_symbol, "redeclaration") \ 200 V(failure_symbol, "<failure>") \ 201 V(space_symbol, " ") \ 202 V(exec_symbol, "exec") \ 203 V(zero_symbol, "0") \ 204 V(global_eval_symbol, "GlobalEval") \ 205 V(identity_hash_symbol, "v8::IdentityHash") \ 206 V(closure_symbol, "(closure)") 207 208 209// Forward declarations. 210class GCTracer; 211class HeapStats; 212class WeakObjectRetainer; 213 214 215typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer); 216 217typedef bool (*DirtyRegionCallback)(Address start, 218 Address end, 219 ObjectSlotCallback copy_object_func); 220 221 222// The all static Heap captures the interface to the global object heap. 223// All JavaScript contexts by this process share the same object heap. 
224 225class Heap : public AllStatic { 226 public: 227 // Configure heap size before setup. Return false if the heap has been 228 // setup already. 229 static bool ConfigureHeap(int max_semispace_size, 230 int max_old_gen_size, 231 int max_executable_size); 232 static bool ConfigureHeapDefault(); 233 234 // Initializes the global object heap. If create_heap_objects is true, 235 // also creates the basic non-mutable objects. 236 // Returns whether it succeeded. 237 static bool Setup(bool create_heap_objects); 238 239 // Destroys all memory allocated by the heap. 240 static void TearDown(); 241 242 // Set the stack limit in the roots_ array. Some architectures generate 243 // code that looks here, because it is faster than loading from the static 244 // jslimit_/real_jslimit_ variable in the StackGuard. 245 static void SetStackLimits(); 246 247 // Returns whether Setup has been called. 248 static bool HasBeenSetup(); 249 250 // Returns the maximum amount of memory reserved for the heap. For 251 // the young generation, we reserve 4 times the amount needed for a 252 // semi space. The young generation consists of two semi spaces and 253 // we reserve twice the amount needed for those in order to ensure 254 // that new space can be aligned to its size. 255 static intptr_t MaxReserved() { 256 return 4 * reserved_semispace_size_ + max_old_generation_size_; 257 } 258 static int MaxSemiSpaceSize() { return max_semispace_size_; } 259 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; } 260 static int InitialSemiSpaceSize() { return initial_semispace_size_; } 261 static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; } 262 static intptr_t MaxExecutableSize() { return max_executable_size_; } 263 264 // Returns the capacity of the heap in bytes w/o growing. Heap grows when 265 // more spaces are needed until it reaches the limit. 266 static intptr_t Capacity(); 267 268 // Returns the amount of memory currently committed for the heap. 
269 static intptr_t CommittedMemory(); 270 271 // Returns the amount of executable memory currently committed for the heap. 272 static intptr_t CommittedMemoryExecutable(); 273 274 // Returns the available bytes in space w/o growing. 275 // Heap doesn't guarantee that it can allocate an object that requires 276 // all available bytes. Check MaxHeapObjectSize() instead. 277 static intptr_t Available(); 278 279 // Returns the maximum object size in paged space. 280 static inline int MaxObjectSizeInPagedSpace(); 281 282 // Returns of size of all objects residing in the heap. 283 static intptr_t SizeOfObjects(); 284 285 // Return the starting address and a mask for the new space. And-masking an 286 // address with the mask will result in the start address of the new space 287 // for all addresses in either semispace. 288 static Address NewSpaceStart() { return new_space_.start(); } 289 static uintptr_t NewSpaceMask() { return new_space_.mask(); } 290 static Address NewSpaceTop() { return new_space_.top(); } 291 292 static NewSpace* new_space() { return &new_space_; } 293 static OldSpace* old_pointer_space() { return old_pointer_space_; } 294 static OldSpace* old_data_space() { return old_data_space_; } 295 static OldSpace* code_space() { return code_space_; } 296 static MapSpace* map_space() { return map_space_; } 297 static CellSpace* cell_space() { return cell_space_; } 298 static LargeObjectSpace* lo_space() { return lo_space_; } 299 300 static bool always_allocate() { return always_allocate_scope_depth_ != 0; } 301 static Address always_allocate_scope_depth_address() { 302 return reinterpret_cast<Address>(&always_allocate_scope_depth_); 303 } 304 static bool linear_allocation() { 305 return linear_allocation_scope_depth_ != 0; 306 } 307 308 static Address* NewSpaceAllocationTopAddress() { 309 return new_space_.allocation_top_address(); 310 } 311 static Address* NewSpaceAllocationLimitAddress() { 312 return new_space_.allocation_limit_address(); 313 } 314 315 // 
Uncommit unused semi space. 316 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 317 318#ifdef ENABLE_HEAP_PROTECTION 319 // Protect/unprotect the heap by marking all spaces read-only/writable. 320 static void Protect(); 321 static void Unprotect(); 322#endif 323 324 // Allocates and initializes a new JavaScript object based on a 325 // constructor. 326 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 327 // failed. 328 // Please note this does not perform a garbage collection. 329 MUST_USE_RESULT static MaybeObject* AllocateJSObject( 330 JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED); 331 332 // Allocates and initializes a new global object based on a constructor. 333 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 334 // failed. 335 // Please note this does not perform a garbage collection. 336 MUST_USE_RESULT static MaybeObject* AllocateGlobalObject( 337 JSFunction* constructor); 338 339 // Returns a deep copy of the JavaScript object. 340 // Properties and elements are copied too. 341 // Returns failure if allocation failed. 342 MUST_USE_RESULT static MaybeObject* CopyJSObject(JSObject* source); 343 344 // Allocates the function prototype. 345 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 346 // failed. 347 // Please note this does not perform a garbage collection. 348 MUST_USE_RESULT static MaybeObject* AllocateFunctionPrototype( 349 JSFunction* function); 350 351 // Reinitialize an JSGlobalProxy based on a constructor. The object 352 // must have the same size as objects allocated using the 353 // constructor. The object is reinitialized and behaves as an 354 // object that has been freshly allocated using the constructor. 355 MUST_USE_RESULT static MaybeObject* ReinitializeJSGlobalProxy( 356 JSFunction* constructor, 357 JSGlobalProxy* global); 358 359 // Allocates and initializes a new JavaScript object based on a map. 
360 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 361 // failed. 362 // Please note this does not perform a garbage collection. 363 MUST_USE_RESULT static MaybeObject* AllocateJSObjectFromMap( 364 Map* map, PretenureFlag pretenure = NOT_TENURED); 365 366 // Allocates a heap object based on the map. 367 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 368 // failed. 369 // Please note this function does not perform a garbage collection. 370 MUST_USE_RESULT static MaybeObject* Allocate(Map* map, AllocationSpace space); 371 372 // Allocates a JS Map in the heap. 373 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 374 // failed. 375 // Please note this function does not perform a garbage collection. 376 MUST_USE_RESULT static MaybeObject* AllocateMap(InstanceType instance_type, 377 int instance_size); 378 379 // Allocates a partial map for bootstrapping. 380 MUST_USE_RESULT static MaybeObject* AllocatePartialMap( 381 InstanceType instance_type, 382 int instance_size); 383 384 // Allocate a map for the specified function 385 MUST_USE_RESULT static MaybeObject* AllocateInitialMap(JSFunction* fun); 386 387 // Allocates an empty code cache. 388 MUST_USE_RESULT static MaybeObject* AllocateCodeCache(); 389 390 // Clear the Instanceof cache (used when a prototype changes). 391 static void ClearInstanceofCache() { 392 set_instanceof_cache_function(the_hole_value()); 393 } 394 395 // Allocates and fully initializes a String. There are two String 396 // encodings: ASCII and two byte. One should choose between the three string 397 // allocation functions based on the encoding of the string buffer used to 398 // initialized the string. 399 // - ...FromAscii initializes the string from a buffer that is ASCII 400 // encoded (it does not check that the buffer is ASCII encoded) and the 401 // result will be ASCII encoded. 
402 // - ...FromUTF8 initializes the string from a buffer that is UTF-8 403 // encoded. If the characters are all single-byte characters, the 404 // result will be ASCII encoded, otherwise it will converted to two 405 // byte. 406 // - ...FromTwoByte initializes the string from a buffer that is two-byte 407 // encoded. If the characters are all single-byte characters, the 408 // result will be converted to ASCII, otherwise it will be left as 409 // two-byte. 410 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 411 // failed. 412 // Please note this does not perform a garbage collection. 413 MUST_USE_RESULT static MaybeObject* AllocateStringFromAscii( 414 Vector<const char> str, 415 PretenureFlag pretenure = NOT_TENURED); 416 MUST_USE_RESULT static inline MaybeObject* AllocateStringFromUtf8( 417 Vector<const char> str, 418 PretenureFlag pretenure = NOT_TENURED); 419 MUST_USE_RESULT static MaybeObject* AllocateStringFromUtf8Slow( 420 Vector<const char> str, 421 PretenureFlag pretenure = NOT_TENURED); 422 MUST_USE_RESULT static MaybeObject* AllocateStringFromTwoByte( 423 Vector<const uc16> str, 424 PretenureFlag pretenure = NOT_TENURED); 425 426 // Allocates a symbol in old space based on the character stream. 427 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 428 // failed. 429 // Please note this function does not perform a garbage collection. 
430 MUST_USE_RESULT static inline MaybeObject* AllocateSymbol( 431 Vector<const char> str, 432 int chars, 433 uint32_t hash_field); 434 435 MUST_USE_RESULT static inline MaybeObject* AllocateAsciiSymbol( 436 Vector<const char> str, 437 uint32_t hash_field); 438 439 MUST_USE_RESULT static inline MaybeObject* AllocateTwoByteSymbol( 440 Vector<const uc16> str, 441 uint32_t hash_field); 442 443 MUST_USE_RESULT static MaybeObject* AllocateInternalSymbol( 444 unibrow::CharacterStream* buffer, int chars, uint32_t hash_field); 445 446 MUST_USE_RESULT static MaybeObject* AllocateExternalSymbol( 447 Vector<const char> str, 448 int chars); 449 450 451 // Allocates and partially initializes a String. There are two String 452 // encodings: ASCII and two byte. These functions allocate a string of the 453 // given length and set its map and length fields. The characters of the 454 // string are uninitialized. 455 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 456 // failed. 457 // Please note this does not perform a garbage collection. 458 MUST_USE_RESULT static MaybeObject* AllocateRawAsciiString( 459 int length, 460 PretenureFlag pretenure = NOT_TENURED); 461 MUST_USE_RESULT static MaybeObject* AllocateRawTwoByteString( 462 int length, 463 PretenureFlag pretenure = NOT_TENURED); 464 465 // Computes a single character string where the character has code. 466 // A cache is used for ascii codes. 467 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 468 // failed. Please note this does not perform a garbage collection. 469 MUST_USE_RESULT static MaybeObject* LookupSingleCharacterStringFromCode( 470 uint16_t code); 471 472 // Allocate a byte array of the specified length 473 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 474 // failed. 475 // Please note this does not perform a garbage collection. 
476 MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length, 477 PretenureFlag pretenure); 478 479 // Allocate a non-tenured byte array of the specified length 480 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 481 // failed. 482 // Please note this does not perform a garbage collection. 483 MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length); 484 485 // Allocate a pixel array of the specified length 486 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 487 // failed. 488 // Please note this does not perform a garbage collection. 489 MUST_USE_RESULT static MaybeObject* AllocatePixelArray(int length, 490 uint8_t* external_pointer, 491 PretenureFlag pretenure); 492 493 // Allocates an external array of the specified length and type. 494 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 495 // failed. 496 // Please note this does not perform a garbage collection. 497 MUST_USE_RESULT static MaybeObject* AllocateExternalArray( 498 int length, 499 ExternalArrayType array_type, 500 void* external_pointer, 501 PretenureFlag pretenure); 502 503 // Allocate a tenured JS global property cell. 504 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 505 // failed. 506 // Please note this does not perform a garbage collection. 507 MUST_USE_RESULT static MaybeObject* AllocateJSGlobalPropertyCell( 508 Object* value); 509 510 // Allocates a fixed array initialized with undefined values 511 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 512 // failed. 513 // Please note this does not perform a garbage collection. 514 MUST_USE_RESULT static MaybeObject* AllocateFixedArray( 515 int length, 516 PretenureFlag pretenure); 517 // Allocates a fixed array initialized with undefined values 518 MUST_USE_RESULT static MaybeObject* AllocateFixedArray(int length); 519 520 // Allocates an uninitialized fixed array. It must be filled by the caller. 
521 // 522 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 523 // failed. 524 // Please note this does not perform a garbage collection. 525 MUST_USE_RESULT static MaybeObject* AllocateUninitializedFixedArray( 526 int length); 527 528 // Make a copy of src and return it. Returns 529 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 530 MUST_USE_RESULT static inline MaybeObject* CopyFixedArray(FixedArray* src); 531 532 // Make a copy of src, set the map, and return the copy. Returns 533 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 534 MUST_USE_RESULT static MaybeObject* CopyFixedArrayWithMap(FixedArray* src, 535 Map* map); 536 537 // Allocates a fixed array initialized with the hole values. 538 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 539 // failed. 540 // Please note this does not perform a garbage collection. 541 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithHoles( 542 int length, 543 PretenureFlag pretenure = NOT_TENURED); 544 545 // AllocateHashTable is identical to AllocateFixedArray except 546 // that the resulting object has hash_table_map as map. 547 MUST_USE_RESULT static MaybeObject* AllocateHashTable( 548 int length, PretenureFlag pretenure = NOT_TENURED); 549 550 // Allocate a global (but otherwise uninitialized) context. 551 MUST_USE_RESULT static MaybeObject* AllocateGlobalContext(); 552 553 // Allocate a function context. 554 MUST_USE_RESULT static MaybeObject* AllocateFunctionContext( 555 int length, 556 JSFunction* closure); 557 558 // Allocate a 'with' context. 559 MUST_USE_RESULT static MaybeObject* AllocateWithContext( 560 Context* previous, 561 JSObject* extension, 562 bool is_catch_context); 563 564 // Allocates a new utility object in the old generation. 565 MUST_USE_RESULT static MaybeObject* AllocateStruct(InstanceType type); 566 567 // Allocates a function initialized with a shared part. 
568 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 569 // failed. 570 // Please note this does not perform a garbage collection. 571 MUST_USE_RESULT static MaybeObject* AllocateFunction( 572 Map* function_map, 573 SharedFunctionInfo* shared, 574 Object* prototype, 575 PretenureFlag pretenure = TENURED); 576 577 // Indicies for direct access into argument objects. 578 static const int kArgumentsObjectSize = 579 JSObject::kHeaderSize + 2 * kPointerSize; 580 static const int arguments_callee_index = 0; 581 static const int arguments_length_index = 1; 582 583 // Allocates an arguments object - optionally with an elements array. 584 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 585 // failed. 586 // Please note this does not perform a garbage collection. 587 MUST_USE_RESULT static MaybeObject* AllocateArgumentsObject(Object* callee, 588 int length); 589 590 // Same as NewNumberFromDouble, but may return a preallocated/immutable 591 // number object (e.g., minus_zero_value_, nan_value_) 592 MUST_USE_RESULT static MaybeObject* NumberFromDouble( 593 double value, PretenureFlag pretenure = NOT_TENURED); 594 595 // Allocated a HeapNumber from value. 596 MUST_USE_RESULT static MaybeObject* AllocateHeapNumber( 597 double value, 598 PretenureFlag pretenure); 599 // pretenure = NOT_TENURED. 600 MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(double value); 601 602 // Converts an int into either a Smi or a HeapNumber object. 603 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 604 // failed. 605 // Please note this does not perform a garbage collection. 606 MUST_USE_RESULT static inline MaybeObject* NumberFromInt32(int32_t value); 607 608 // Converts an int into either a Smi or a HeapNumber object. 609 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 610 // failed. 611 // Please note this does not perform a garbage collection. 
612 MUST_USE_RESULT static inline MaybeObject* NumberFromUint32(uint32_t value); 613 614 // Allocates a new proxy object. 615 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 616 // failed. 617 // Please note this does not perform a garbage collection. 618 MUST_USE_RESULT static MaybeObject* AllocateProxy( 619 Address proxy, 620 PretenureFlag pretenure = NOT_TENURED); 621 622 // Allocates a new SharedFunctionInfo object. 623 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 624 // failed. 625 // Please note this does not perform a garbage collection. 626 MUST_USE_RESULT static MaybeObject* AllocateSharedFunctionInfo(Object* name); 627 628 // Allocates a new cons string object. 629 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 630 // failed. 631 // Please note this does not perform a garbage collection. 632 MUST_USE_RESULT static MaybeObject* AllocateConsString(String* first, 633 String* second); 634 635 // Allocates a new sub string object which is a substring of an underlying 636 // string buffer stretching from the index start (inclusive) to the index 637 // end (exclusive). 638 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 639 // failed. 640 // Please note this does not perform a garbage collection. 641 MUST_USE_RESULT static MaybeObject* AllocateSubString( 642 String* buffer, 643 int start, 644 int end, 645 PretenureFlag pretenure = NOT_TENURED); 646 647 // Allocate a new external string object, which is backed by a string 648 // resource that resides outside the V8 heap. 649 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 650 // failed. 651 // Please note this does not perform a garbage collection. 
652 MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromAscii( 653 ExternalAsciiString::Resource* resource); 654 MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromTwoByte( 655 ExternalTwoByteString::Resource* resource); 656 657 // Finalizes an external string by deleting the associated external 658 // data and clearing the resource pointer. 659 static inline void FinalizeExternalString(String* string); 660 661 // Allocates an uninitialized object. The memory is non-executable if the 662 // hardware and OS allow. 663 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 664 // failed. 665 // Please note this function does not perform a garbage collection. 666 MUST_USE_RESULT static inline MaybeObject* AllocateRaw( 667 int size_in_bytes, 668 AllocationSpace space, 669 AllocationSpace retry_space); 670 671 // Initialize a filler object to keep the ability to iterate over the heap 672 // when shortening objects. 673 static void CreateFillerObjectAt(Address addr, int size); 674 675 // Makes a new native code object 676 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 677 // failed. On success, the pointer to the Code object is stored in the 678 // self_reference. This allows generated code to reference its own Code 679 // object by containing this pointer. 680 // Please note this function does not perform a garbage collection. 681 MUST_USE_RESULT static MaybeObject* CreateCode(const CodeDesc& desc, 682 Code::Flags flags, 683 Handle<Object> self_reference); 684 685 MUST_USE_RESULT static MaybeObject* CopyCode(Code* code); 686 687 // Copy the code and scope info part of the code object, but insert 688 // the provided data as the relocation information. 689 MUST_USE_RESULT static MaybeObject* CopyCode(Code* code, 690 Vector<byte> reloc_info); 691 692 // Finds the symbol for string in the symbol table. 693 // If not found, a new symbol is added to the table and returned. 
694 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation 695 // failed. 696 // Please note this function does not perform a garbage collection. 697 MUST_USE_RESULT static MaybeObject* LookupSymbol(Vector<const char> str); 698 MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(Vector<const char> str); 699 MUST_USE_RESULT static MaybeObject* LookupTwoByteSymbol( 700 Vector<const uc16> str); 701 MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(const char* str) { 702 return LookupSymbol(CStrVector(str)); 703 } 704 MUST_USE_RESULT static MaybeObject* LookupSymbol(String* str); 705 static bool LookupSymbolIfExists(String* str, String** symbol); 706 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol); 707 708 // Compute the matching symbol map for a string if possible. 709 // NULL is returned if string is in new space or not flattened. 710 static Map* SymbolMapForString(String* str); 711 712 // Tries to flatten a string before compare operation. 713 // 714 // Returns a failure in case it was decided that flattening was 715 // necessary and failed. Note, if flattening is not necessary the 716 // string might stay non-flat even when not a failure is returned. 717 // 718 // Please note this function does not perform a garbage collection. 719 MUST_USE_RESULT static inline MaybeObject* PrepareForCompare(String* str); 720 721 // Converts the given boolean condition to JavaScript boolean value. 722 static Object* ToBoolean(bool condition) { 723 return condition ? true_value() : false_value(); 724 } 725 726 // Code that should be run before and after each GC. Includes some 727 // reporting/verification activities when compiled with DEBUG set. 728 static void GarbageCollectionPrologue(); 729 static void GarbageCollectionEpilogue(); 730 731 // Performs garbage collection operation. 732 // Returns whether there is a chance that another major GC could 733 // collect more garbage. 
  static bool CollectGarbage(AllocationSpace space, GarbageCollector collector);

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline static bool CollectGarbage(AllocationSpace space);

  // Performs a full garbage collection. Force compaction if the
  // parameter is true.
  static void CollectAllGarbage(bool force_compaction);

  // Last hope GC, should try to squeeze as much as possible.
  static void CollectAllAvailableGarbage();

  // Notify the heap that a context has been disposed.  Returns the new
  // count of disposed contexts (used as a GC heuristic elsewhere).
  static int NotifyContextDisposed() { return ++contexts_disposed_; }

  // Utility to invoke the scavenger. This is needed in test code to
  // ensure correct callback for weak global handles.
  static void PerformScavenge();

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  static void GarbageCollectionGreedyCheck();
#endif

  // Registers a callback to be run before GCs whose type matches
  // gc_type_filter.
  // NOTE(review): the parameter is typed GCEpilogueCallback even though this
  // registers a *prologue* callback — presumably the prologue and epilogue
  // callback typedefs share the same signature; confirm against v8.h and
  // consider switching to GCPrologueCallback for clarity (the matching
  // definition in heap.cc would need the same change).
  static void AddGCPrologueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  static void RemoveGCPrologueCallback(GCEpilogueCallback callback);

  // Registers a callback to be run after GCs whose type matches
  // gc_type_filter.
  static void AddGCEpilogueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);

  // Installs (non-NULL) or clears (NULL) the single global GC prologue
  // callback.  The XOR assert enforces strict set/clear alternation:
  // exactly one of the old and new values must be NULL.
  static void SetGlobalGCPrologueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;
  }
  // Installs or clears the single global GC epilogue callback; same
  // alternation contract as the prologue setter above.
  static void SetGlobalGCEpilogueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
  }

  // Heap root getters. We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
779#define ROOT_ACCESSOR(type, name, camel_name) \ 780 static inline type* name() { \ 781 return type::cast(roots_[k##camel_name##RootIndex]); \ 782 } \ 783 static inline type* raw_unchecked_##name() { \ 784 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \ 785 } 786 ROOT_LIST(ROOT_ACCESSOR) 787#undef ROOT_ACCESSOR 788 789// Utility type maps 790#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ 791 static inline Map* name##_map() { \ 792 return Map::cast(roots_[k##Name##MapRootIndex]); \ 793 } 794 STRUCT_LIST(STRUCT_MAP_ACCESSOR) 795#undef STRUCT_MAP_ACCESSOR 796 797#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \ 798 return String::cast(roots_[k##name##RootIndex]); \ 799 } 800 SYMBOL_LIST(SYMBOL_ACCESSOR) 801#undef SYMBOL_ACCESSOR 802 803 // The hidden_symbol is special because it is the empty string, but does 804 // not match the empty string. 805 static String* hidden_symbol() { return hidden_symbol_; } 806 807 static void set_global_contexts_list(Object* object) { 808 global_contexts_list_ = object; 809 } 810 static Object* global_contexts_list() { return global_contexts_list_; } 811 812 // Iterates over all roots in the heap. 813 static void IterateRoots(ObjectVisitor* v, VisitMode mode); 814 // Iterates over all strong roots in the heap. 815 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); 816 // Iterates over all the other roots in the heap. 817 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); 818 819 enum ExpectedPageWatermarkState { 820 WATERMARK_SHOULD_BE_VALID, 821 WATERMARK_CAN_BE_INVALID 822 }; 823 824 // For each dirty region on a page in use from an old space call 825 // visit_dirty_region callback. 826 // If either visit_dirty_region or callback can cause an allocation 827 // in old space and changes in allocation watermark then 828 // can_preallocate_during_iteration should be set to true. 829 // All pages will be marked as having invalid watermark upon 830 // iteration completion. 
831 static void IterateDirtyRegions( 832 PagedSpace* space, 833 DirtyRegionCallback visit_dirty_region, 834 ObjectSlotCallback callback, 835 ExpectedPageWatermarkState expected_page_watermark_state); 836 837 // Interpret marks as a bitvector of dirty marks for regions of size 838 // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering 839 // memory interval from start to top. For each dirty region call a 840 // visit_dirty_region callback. Return updated bitvector of dirty marks. 841 static uint32_t IterateDirtyRegions(uint32_t marks, 842 Address start, 843 Address end, 844 DirtyRegionCallback visit_dirty_region, 845 ObjectSlotCallback callback); 846 847 // Iterate pointers to from semispace of new space found in memory interval 848 // from start to end. 849 // Update dirty marks for page containing start address. 850 static void IterateAndMarkPointersToFromSpace(Address start, 851 Address end, 852 ObjectSlotCallback callback); 853 854 // Iterate pointers to new space found in memory interval from start to end. 855 // Return true if pointers to new space was found. 856 static bool IteratePointersInDirtyRegion(Address start, 857 Address end, 858 ObjectSlotCallback callback); 859 860 861 // Iterate pointers to new space found in memory interval from start to end. 862 // This interval is considered to belong to the map space. 863 // Return true if pointers to new space was found. 864 static bool IteratePointersInDirtyMapsRegion(Address start, 865 Address end, 866 ObjectSlotCallback callback); 867 868 869 // Returns whether the object resides in new space. 870 static inline bool InNewSpace(Object* object); 871 static inline bool InFromSpace(Object* object); 872 static inline bool InToSpace(Object* object); 873 874 // Checks whether an address/object in the heap (including auxiliary 875 // area and unused area). 876 static bool Contains(Address addr); 877 static bool Contains(HeapObject* value); 878 879 // Checks whether an address/object in a space. 
880 // Currently used by tests, serialization and heap verification only. 881 static bool InSpace(Address addr, AllocationSpace space); 882 static bool InSpace(HeapObject* value, AllocationSpace space); 883 884 // Finds out which space an object should get promoted to based on its type. 885 static inline OldSpace* TargetSpace(HeapObject* object); 886 static inline AllocationSpace TargetSpaceId(InstanceType type); 887 888 // Sets the stub_cache_ (only used when expanding the dictionary). 889 static void public_set_code_stubs(NumberDictionary* value) { 890 roots_[kCodeStubsRootIndex] = value; 891 } 892 893 // Support for computing object sizes for old objects during GCs. Returns 894 // a function that is guaranteed to be safe for computing object sizes in 895 // the current GC phase. 896 static HeapObjectCallback GcSafeSizeOfOldObjectFunction() { 897 return gc_safe_size_of_old_object_; 898 } 899 900 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). 901 static void public_set_non_monomorphic_cache(NumberDictionary* value) { 902 roots_[kNonMonomorphicCacheRootIndex] = value; 903 } 904 905 static void public_set_empty_script(Script* script) { 906 roots_[kEmptyScriptRootIndex] = script; 907 } 908 909 // Update the next script id. 910 static inline void SetLastScriptId(Object* last_script_id); 911 912 // Generated code can embed this address to get access to the roots. 913 static Object** roots_address() { return roots_; } 914 915 // Get address of global contexts list for serialization support. 916 static Object** global_contexts_list_address() { 917 return &global_contexts_list_; 918 } 919 920#ifdef DEBUG 921 static void Print(); 922 static void PrintHandles(); 923 924 // Verify the heap is in its normal state before or after a GC. 925 static void Verify(); 926 927 // Report heap statistics. 
928 static void ReportHeapStatistics(const char* title); 929 static void ReportCodeStatistics(const char* title); 930 931 // Fill in bogus values in from space 932 static void ZapFromSpace(); 933#endif 934 935#if defined(ENABLE_LOGGING_AND_PROFILING) 936 // Print short heap statistics. 937 static void PrintShortHeapStatistics(); 938#endif 939 940 // Makes a new symbol object 941 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 942 // failed. 943 // Please note this function does not perform a garbage collection. 944 MUST_USE_RESULT static MaybeObject* CreateSymbol(const char* str, 945 int length, 946 int hash); 947 MUST_USE_RESULT static MaybeObject* CreateSymbol(String* str); 948 949 // Write barrier support for address[offset] = o. 950 static inline void RecordWrite(Address address, int offset); 951 952 // Write barrier support for address[start : start + len[ = o. 953 static inline void RecordWrites(Address address, int start, int len); 954 955 // Given an address occupied by a live code object, return that object. 956 static Object* FindCodeObject(Address a); 957 958 // Invoke Shrink on shrinkable spaces. 959 static void Shrink(); 960 961 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; 962 static inline HeapState gc_state() { return gc_state_; } 963 964#ifdef DEBUG 965 static bool IsAllocationAllowed() { return allocation_allowed_; } 966 static inline bool allow_allocation(bool enable); 967 968 static bool disallow_allocation_failure() { 969 return disallow_allocation_failure_; 970 } 971 972 static void TracePathToObject(Object* target); 973 static void TracePathToGlobal(); 974#endif 975 976 // Callback function passed to Heap::Iterate etc. Copies an object if 977 // necessary, the object might be promoted to an old space. The caller must 978 // ensure the precondition that the object is (a) a heap object and (b) in 979 // the heap's from space. 
980 static void ScavengePointer(HeapObject** p); 981 static inline void ScavengeObject(HeapObject** p, HeapObject* object); 982 983 // Commits from space if it is uncommitted. 984 static void EnsureFromSpaceIsCommitted(); 985 986 // Support for partial snapshots. After calling this we can allocate a 987 // certain number of bytes using only linear allocation (with a 988 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists 989 // or causing a GC. It returns true of space was reserved or false if a GC is 990 // needed. For paged spaces the space requested must include the space wasted 991 // at the end of each page when allocating linearly. 992 static void ReserveSpace( 993 int new_space_size, 994 int pointer_space_size, 995 int data_space_size, 996 int code_space_size, 997 int map_space_size, 998 int cell_space_size, 999 int large_object_size); 1000 1001 // 1002 // Support for the API. 1003 // 1004 1005 static bool CreateApiObjects(); 1006 1007 // Attempt to find the number in a small cache. If we finds it, return 1008 // the string representation of the number. Otherwise return undefined. 1009 static Object* GetNumberStringCache(Object* number); 1010 1011 // Update the cache with a new number-string pair. 1012 static void SetNumberStringCache(Object* number, String* str); 1013 1014 // Adjusts the amount of registered external memory. 1015 // Returns the adjusted value. 1016 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes); 1017 1018 // Allocate uninitialized fixed array. 1019 MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(int length); 1020 MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray( 1021 int length, 1022 PretenureFlag pretenure); 1023 1024 // True if we have reached the allocation limit in the old generation that 1025 // should force the next GC (caused normally) to be a full one. 
1026 static bool OldGenerationPromotionLimitReached() { 1027 return (PromotedSpaceSize() + PromotedExternalMemorySize()) 1028 > old_gen_promotion_limit_; 1029 } 1030 1031 static intptr_t OldGenerationSpaceAvailable() { 1032 return old_gen_allocation_limit_ - 1033 (PromotedSpaceSize() + PromotedExternalMemorySize()); 1034 } 1035 1036 // True if we have reached the allocation limit in the old generation that 1037 // should artificially cause a GC right now. 1038 static bool OldGenerationAllocationLimitReached() { 1039 return OldGenerationSpaceAvailable() < 0; 1040 } 1041 1042 // Can be called when the embedding application is idle. 1043 static bool IdleNotification(); 1044 1045 // Declare all the root indices. 1046 enum RootListIndex { 1047#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, 1048 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) 1049#undef ROOT_INDEX_DECLARATION 1050 1051// Utility type maps 1052#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex, 1053 STRUCT_LIST(DECLARE_STRUCT_MAP) 1054#undef DECLARE_STRUCT_MAP 1055 1056#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex, 1057 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION) 1058#undef SYMBOL_DECLARATION 1059 1060 kSymbolTableRootIndex, 1061 kStrongRootListLength = kSymbolTableRootIndex, 1062 kRootListLength 1063 }; 1064 1065 MUST_USE_RESULT static MaybeObject* NumberToString( 1066 Object* number, 1067 bool check_number_string_cache = true); 1068 1069 static Map* MapForExternalArrayType(ExternalArrayType array_type); 1070 static RootListIndex RootIndexForExternalArrayType( 1071 ExternalArrayType array_type); 1072 1073 static void RecordStats(HeapStats* stats, bool take_snapshot = false); 1074 1075 // Copy block of memory from src to dst. Size of block should be aligned 1076 // by pointer size. 
1077 static inline void CopyBlock(Address dst, Address src, int byte_size); 1078 1079 static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst, 1080 Address src, 1081 int byte_size); 1082 1083 // Optimized version of memmove for blocks with pointer size aligned sizes and 1084 // pointer size aligned addresses. 1085 static inline void MoveBlock(Address dst, Address src, int byte_size); 1086 1087 static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst, 1088 Address src, 1089 int byte_size); 1090 1091 // Check new space expansion criteria and expand semispaces if it was hit. 1092 static void CheckNewSpaceExpansionCriteria(); 1093 1094 static inline void IncrementYoungSurvivorsCounter(int survived) { 1095 young_survivors_after_last_gc_ = survived; 1096 survived_since_last_expansion_ += survived; 1097 } 1098 1099 static void UpdateNewSpaceReferencesInExternalStringTable( 1100 ExternalStringTableUpdaterCallback updater_func); 1101 1102 static void ProcessWeakReferences(WeakObjectRetainer* retainer); 1103 1104 // Helper function that governs the promotion policy from new space to 1105 // old. If the object's old address lies below the new space's age 1106 // mark or if we've already filled the bottom 1/16th of the to space, 1107 // we try to promote this object. 
1108 static inline bool ShouldBePromoted(Address old_address, int object_size); 1109 1110 static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; } 1111 1112 static void ClearJSFunctionResultCaches(); 1113 1114 static void ClearNormalizedMapCaches(); 1115 1116 static GCTracer* tracer() { return tracer_; } 1117 1118 private: 1119 static int reserved_semispace_size_; 1120 static int max_semispace_size_; 1121 static int initial_semispace_size_; 1122 static intptr_t max_old_generation_size_; 1123 static intptr_t max_executable_size_; 1124 static intptr_t code_range_size_; 1125 1126 // For keeping track of how much data has survived 1127 // scavenge since last new space expansion. 1128 static int survived_since_last_expansion_; 1129 1130 static int always_allocate_scope_depth_; 1131 static int linear_allocation_scope_depth_; 1132 1133 // For keeping track of context disposals. 1134 static int contexts_disposed_; 1135 1136#if defined(V8_TARGET_ARCH_X64) 1137 static const int kMaxObjectSizeInNewSpace = 1024*KB; 1138#else 1139 static const int kMaxObjectSizeInNewSpace = 512*KB; 1140#endif 1141 1142 static NewSpace new_space_; 1143 static OldSpace* old_pointer_space_; 1144 static OldSpace* old_data_space_; 1145 static OldSpace* code_space_; 1146 static MapSpace* map_space_; 1147 static CellSpace* cell_space_; 1148 static LargeObjectSpace* lo_space_; 1149 static HeapState gc_state_; 1150 1151 // Returns the size of object residing in non new spaces. 1152 static intptr_t PromotedSpaceSize(); 1153 1154 // Returns the amount of external memory registered since last global gc. 1155 static int PromotedExternalMemorySize(); 1156 1157 static int mc_count_; // how many mark-compact collections happened 1158 static int ms_count_; // how many mark-sweep collections happened 1159 static int gc_count_; // how many gc happened 1160 1161 // Total length of the strings we failed to flatten since the last GC. 
1162 static int unflattened_strings_length_; 1163 1164#define ROOT_ACCESSOR(type, name, camel_name) \ 1165 static inline void set_##name(type* value) { \ 1166 roots_[k##camel_name##RootIndex] = value; \ 1167 } 1168 ROOT_LIST(ROOT_ACCESSOR) 1169#undef ROOT_ACCESSOR 1170 1171#ifdef DEBUG 1172 static bool allocation_allowed_; 1173 1174 // If the --gc-interval flag is set to a positive value, this 1175 // variable holds the value indicating the number of allocations 1176 // remain until the next failure and garbage collection. 1177 static int allocation_timeout_; 1178 1179 // Do we expect to be able to handle allocation failure at this 1180 // time? 1181 static bool disallow_allocation_failure_; 1182#endif // DEBUG 1183 1184 // Limit that triggers a global GC on the next (normally caused) GC. This 1185 // is checked when we have already decided to do a GC to help determine 1186 // which collector to invoke. 1187 static intptr_t old_gen_promotion_limit_; 1188 1189 // Limit that triggers a global GC as soon as is reasonable. This is 1190 // checked before expanding a paged space in the old generation and on 1191 // every allocation in large object space. 1192 static intptr_t old_gen_allocation_limit_; 1193 1194 // Limit on the amount of externally allocated memory allowed 1195 // between global GCs. If reached a global GC is forced. 1196 static intptr_t external_allocation_limit_; 1197 1198 // The amount of external memory registered through the API kept alive 1199 // by global handles 1200 static int amount_of_external_allocated_memory_; 1201 1202 // Caches the amount of external memory registered at the last global gc. 1203 static int amount_of_external_allocated_memory_at_last_global_gc_; 1204 1205 // Indicates that an allocation has failed in the old generation since the 1206 // last GC. 
1207 static int old_gen_exhausted_; 1208 1209 static Object* roots_[kRootListLength]; 1210 1211 static Object* global_contexts_list_; 1212 1213 struct StringTypeTable { 1214 InstanceType type; 1215 int size; 1216 RootListIndex index; 1217 }; 1218 1219 struct ConstantSymbolTable { 1220 const char* contents; 1221 RootListIndex index; 1222 }; 1223 1224 struct StructTable { 1225 InstanceType type; 1226 int size; 1227 RootListIndex index; 1228 }; 1229 1230 static const StringTypeTable string_type_table[]; 1231 static const ConstantSymbolTable constant_symbol_table[]; 1232 static const StructTable struct_table[]; 1233 1234 // The special hidden symbol which is an empty string, but does not match 1235 // any string when looked up in properties. 1236 static String* hidden_symbol_; 1237 1238 // GC callback function, called before and after mark-compact GC. 1239 // Allocations in the callback function are disallowed. 1240 struct GCPrologueCallbackPair { 1241 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type) 1242 : callback(callback), gc_type(gc_type) { 1243 } 1244 bool operator==(const GCPrologueCallbackPair& pair) const { 1245 return pair.callback == callback; 1246 } 1247 GCPrologueCallback callback; 1248 GCType gc_type; 1249 }; 1250 static List<GCPrologueCallbackPair> gc_prologue_callbacks_; 1251 1252 struct GCEpilogueCallbackPair { 1253 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type) 1254 : callback(callback), gc_type(gc_type) { 1255 } 1256 bool operator==(const GCEpilogueCallbackPair& pair) const { 1257 return pair.callback == callback; 1258 } 1259 GCEpilogueCallback callback; 1260 GCType gc_type; 1261 }; 1262 static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_; 1263 1264 static GCCallback global_gc_prologue_callback_; 1265 static GCCallback global_gc_epilogue_callback_; 1266 1267 // Support for computing object sizes during GC. 
1268 static HeapObjectCallback gc_safe_size_of_old_object_; 1269 static int GcSafeSizeOfOldObject(HeapObject* object); 1270 static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object); 1271 1272 // Update the GC state. Called from the mark-compact collector. 1273 static void MarkMapPointersAsEncoded(bool encoded) { 1274 gc_safe_size_of_old_object_ = encoded 1275 ? &GcSafeSizeOfOldObjectWithEncodedMap 1276 : &GcSafeSizeOfOldObject; 1277 } 1278 1279 // Checks whether a global GC is necessary 1280 static GarbageCollector SelectGarbageCollector(AllocationSpace space); 1281 1282 // Performs garbage collection 1283 // Returns whether there is a chance another major GC could 1284 // collect more garbage. 1285 static bool PerformGarbageCollection(GarbageCollector collector, 1286 GCTracer* tracer); 1287 1288 // Allocate an uninitialized object in map space. The behavior is identical 1289 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't 1290 // have to test the allocation space argument and (b) can reduce code size 1291 // (since both AllocateRaw and AllocateRawMap are inlined). 1292 MUST_USE_RESULT static inline MaybeObject* AllocateRawMap(); 1293 1294 // Allocate an uninitialized object in the global property cell space. 1295 MUST_USE_RESULT static inline MaybeObject* AllocateRawCell(); 1296 1297 // Initializes a JSObject based on its map. 1298 static void InitializeJSObjectFromMap(JSObject* obj, 1299 FixedArray* properties, 1300 Map* map); 1301 1302 static bool CreateInitialMaps(); 1303 static bool CreateInitialObjects(); 1304 1305 // These four Create*EntryStub functions are here and forced to not be inlined 1306 // because of a gcc-4.4 bug that assigns wrong vtable entries. 
1307 NO_INLINE(static void CreateCEntryStub()); 1308 NO_INLINE(static void CreateJSEntryStub()); 1309 NO_INLINE(static void CreateJSConstructEntryStub()); 1310 NO_INLINE(static void CreateRegExpCEntryStub()); 1311 1312 static void CreateFixedStubs(); 1313 1314 MUST_USE_RESULT static MaybeObject* CreateOddball(const char* to_string, 1315 Object* to_number); 1316 1317 // Allocate empty fixed array. 1318 MUST_USE_RESULT static MaybeObject* AllocateEmptyFixedArray(); 1319 1320 // Performs a minor collection in new generation. 1321 static void Scavenge(); 1322 1323 static String* UpdateNewSpaceReferenceInExternalStringTableEntry( 1324 Object** pointer); 1325 1326 static Address DoScavenge(ObjectVisitor* scavenge_visitor, 1327 Address new_space_front); 1328 1329 // Performs a major collection in the whole heap. 1330 static void MarkCompact(GCTracer* tracer); 1331 1332 // Code to be run before and after mark-compact. 1333 static void MarkCompactPrologue(bool is_compacting); 1334 1335 // Completely clear the Instanceof cache (to stop it keeping objects alive 1336 // around a GC). 1337 static void CompletelyClearInstanceofCache() { 1338 set_instanceof_cache_map(the_hole_value()); 1339 set_instanceof_cache_function(the_hole_value()); 1340 } 1341 1342#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1343 // Record statistics before and after garbage collection. 1344 static void ReportStatisticsBeforeGC(); 1345 static void ReportStatisticsAfterGC(); 1346#endif 1347 1348 // Slow part of scavenge object. 1349 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); 1350 1351 // Initializes a function with a shared part and prototype. 1352 // Returns the function. 1353 // Note: this code was factored out of AllocateFunction such that 1354 // other parts of the VM could use it. Specifically, a function that creates 1355 // instances of type JS_FUNCTION_TYPE benefit from the use of this function. 1356 // Please note this does not perform a garbage collection. 
1357 MUST_USE_RESULT static inline MaybeObject* InitializeFunction( 1358 JSFunction* function, 1359 SharedFunctionInfo* shared, 1360 Object* prototype); 1361 1362 static GCTracer* tracer_; 1363 1364 1365 // Initializes the number to string cache based on the max semispace size. 1366 MUST_USE_RESULT static MaybeObject* InitializeNumberStringCache(); 1367 // Flush the number to string cache. 1368 static void FlushNumberStringCache(); 1369 1370 static void UpdateSurvivalRateTrend(int start_new_space_size); 1371 1372 enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING }; 1373 1374 static const int kYoungSurvivalRateThreshold = 90; 1375 static const int kYoungSurvivalRateAllowedDeviation = 15; 1376 1377 static int young_survivors_after_last_gc_; 1378 static int high_survival_rate_period_length_; 1379 static double survival_rate_; 1380 static SurvivalRateTrend previous_survival_rate_trend_; 1381 static SurvivalRateTrend survival_rate_trend_; 1382 1383 static void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) { 1384 ASSERT(survival_rate_trend != FLUCTUATING); 1385 previous_survival_rate_trend_ = survival_rate_trend_; 1386 survival_rate_trend_ = survival_rate_trend; 1387 } 1388 1389 static SurvivalRateTrend survival_rate_trend() { 1390 if (survival_rate_trend_ == STABLE) { 1391 return STABLE; 1392 } else if (previous_survival_rate_trend_ == STABLE) { 1393 return survival_rate_trend_; 1394 } else if (survival_rate_trend_ != previous_survival_rate_trend_) { 1395 return FLUCTUATING; 1396 } else { 1397 return survival_rate_trend_; 1398 } 1399 } 1400 1401 static bool IsStableOrIncreasingSurvivalTrend() { 1402 switch (survival_rate_trend()) { 1403 case STABLE: 1404 case INCREASING: 1405 return true; 1406 default: 1407 return false; 1408 } 1409 } 1410 1411 static bool IsIncreasingSurvivalTrend() { 1412 return survival_rate_trend() == INCREASING; 1413 } 1414 1415 static bool IsHighSurvivalRate() { 1416 return high_survival_rate_period_length_ > 0; 
  }

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  // These friends poke directly at private Heap state (allocation scope
  // depth counters and debug allocation flags).
  friend class Factory;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
  friend class MarkCompactCollector;
};


// Bundle of out-parameters filled in by Heap::RecordStats.  Each member
// points at a caller-owned slot; the trailing numbers give the slot order,
// with the record bracketed by the kStartMarker/kEndMarker sentinels.
// NOTE(review): presumably read back as a flat record (e.g. in crash/OOM
// reports) — confirm against the callers of RecordStats.
class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                    // 0
  int* new_space_size;                  // 1
  int* new_space_capacity;              // 2
  intptr_t* old_pointer_space_size;     // 3
  intptr_t* old_pointer_space_capacity; // 4
  intptr_t* old_data_space_size;        // 5
  intptr_t* old_data_space_capacity;    // 6
  intptr_t* code_space_size;            // 7
  intptr_t* code_space_capacity;        // 8
  intptr_t* map_space_size;             // 9
  intptr_t* map_space_capacity;         // 10
  intptr_t* cell_space_size;            // 11
  intptr_t* cell_space_capacity;        // 12
  intptr_t* lo_space_size;              // 13
  int* global_handle_count;             // 14
  int* weak_global_handle_count;        // 15
  int* pending_global_handle_count;     // 16
  int* near_death_global_handle_count;  // 17
  int* destroyed_global_handle_count;   // 18
  intptr_t* memory_allocator_size;      // 19
  intptr_t* memory_allocator_capacity;  // 20
  int* objects_per_type;                // 21
  int* size_per_type;                   // 22
  int* os_error;                        // 23
  int* end_marker;                      // 24
};


// RAII scope that increments Heap::always_allocate_scope_depth_ for its
// lifetime and restores it on exit.
class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code. The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    Heap::always_allocate_scope_depth_--;
    // Mirrors the constructor's no-nesting invariant: depth must return
    // to zero when the (single) scope is destroyed.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};


// RAII scope that increments Heap::linear_allocation_scope_depth_ for its
// lifetime.  Unlike AlwaysAllocateScope, nesting is permitted (the
// destructor only asserts the depth never goes negative).
class LinearAllocationScope {
 public:
  LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_++;
  }

  ~LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_--;
    ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
  }
};


#ifdef DEBUG
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        // Every heap pointer must point into the heap, at something whose
        // first word is a valid map.
        ASSERT(Heap::Contains(object));
        ASSERT(object->map()->IsMap());
      }
    }
  }
};


// Visitor class to verify interior pointers in spaces that use region marks
// to keep track of intergenerational references.
// As VerifyPointersVisitor but also checks that dirty marks are set
// for regions covering intergenerational references.
class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        // Same basic checks as VerifyPointersVisitor.
        ASSERT(Heap::Contains(object));
        ASSERT(object->map()->IsMap());
        if (Heap::InNewSpace(object)) {
          // A new-space referent must live in to-space, and the region
          // containing the slot must carry a dirty mark so the write
          // barrier machinery will find the intergenerational reference.
          ASSERT(Heap::InToSpace(object));
          Address addr = reinterpret_cast<Address>(current);
          ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
        }
      }
    }
  }
};
#endif


// Space iterator for iterating over all spaces of the heap.
// Returns each space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  Space* next();
  AllSpaces() { counter_ = FIRST_SPACE; }
 private:
  int counter_;  // Next AllocationSpace value to hand out.
};


// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.
// Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  OldSpace* next();
  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  int counter_;  // Next AllocationSpace value to hand out.
};


// Space iterator for iterating over all the paged spaces of the heap:
// Map space, old pointer space, old data space, code space and cell space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  PagedSpace* next();
  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  int counter_;  // Next AllocationSpace value to hand out.
};


// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  // size_func, when supplied, is threaded through to the per-space object
  // iterators for size computation.
  explicit SpaceIterator(HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  // Creates the object iterator for the current space.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};


// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// these can only iterate over one space only.
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As implementation of free
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapObjectsFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering {
    kNoFiltering,          // Visit every object, including free-list nodes.
    kFilterFreeListNodes,  // Skip de-allocated objects (see caveat above).
    kFilterUnreachable
  };

  HeapIterator();
  explicit HeapIterator(HeapObjectsFiltering filtering);
  ~HeapIterator();

  // Returns the next heap object, or NULL when iteration is exhausted.
  HeapObject* next();
  void reset();

 private:
  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  // Unfiltered advance; next() applies filter_ on top of this.
  HeapObject* NextObject();

  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;  // NULL when filtering_ == kNoFiltering.
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};


// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    String* name;
  };
  // Direct-mapped cache: keys_[i] pairs with field_offsets_[i].
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference reads keys_address()/field_offsets_address() so that
  // generated code can probe the cache directly.
  friend class ExternalReference;
};


// Cache for mapping (array, property name) into descriptor index.
// The cache contains both positive and negative results.
// Descriptor index equals kNotFound means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  static int Lookup(DescriptorArray* array, String* name) {
    // Only symbols are cached; any other string always misses.
    if (!StringShape(name).IsSymbol()) return kAbsent;
    int index = Hash(array, name);
    Key& key = keys_[index];
    if ((key.array == array) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  static void Update(DescriptorArray* array, String* name, int result) {
    ASSERT(result != kAbsent);
    // Non-symbol names are silently not cached (mirrors Lookup above).
    if (StringShape(name).IsSymbol()) {
      int index = Hash(array, name);
      Key& key = keys_[index];
      key.array = array;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  static void Clear();

  static const int kAbsent = -2;
 private:
  // Hashes the (array, name) pointer pair into a cache index.
  static int Hash(DescriptorArray* array, String* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t array_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
    return (array_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    DescriptorArray* array;
    String* name;
  };

  // Direct-mapped cache: keys_[i] pairs with results_[i].
  static Key keys_[kLength];
  static int results_[kLength];
};


// ----------------------------------------------------------------------------
// Marking stack for tracing live objects.

class MarkingStack {
 public:
  // Uses the half-open byte range [low, high) as backing storage.
  void Initialize(Address low, Address high) {
    top_ = low_ = reinterpret_cast<HeapObject**>(low);
    high_ = reinterpret_cast<HeapObject**>(high);
    overflowed_ = false;
  }

  bool is_full() { return top_ >= high_; }

  bool is_empty() { return top_ <= low_; }

  // True once any Push has been dropped because the stack was full;
  // cleared only via clear_overflowed().
  bool overflowed() { return overflowed_; }

  void clear_overflowed() { overflowed_ = false; }

  // Push the (marked) object on the marking stack if there is room,
  // otherwise mark the object as overflowed and wait for a rescan of the
  // heap.
  void Push(HeapObject* object) {
    CHECK(object->IsHeapObject());
    if (is_full()) {
      object->SetOverflow();
      overflowed_ = true;
    } else {
      *(top_++) = object;
    }
  }

  // Pops the most recently pushed object (LIFO).  Caller must ensure the
  // stack is non-empty.
  HeapObject* Pop() {
    ASSERT(!is_empty());
    HeapObject* object = *(--top_);
    CHECK(object->IsHeapObject());
    return object;
  }

 private:
  HeapObject** low_;   // Bottom of the stack.
  HeapObject** top_;   // Next free slot; stack grows upward.
  HeapObject** high_;  // One past the last usable slot.
  bool overflowed_;
};


// A helper class to document/test C++ scopes where we do not
// expect a GC. Usage:
//
// /* Allocation not allowed: we cannot handle a GC in this scope. */
// { AssertNoAllocation nogc;
//   ...
// }

#ifdef DEBUG

// Scoped flag: while alive, sets Heap::disallow_allocation_failure_ and
// restores the previous value on destruction.
class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = Heap::disallow_allocation_failure_;
    Heap::disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    Heap::disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;  // Saved flag value, restored in the destructor.
};

// Scoped guard: disables heap allocation for the dynamic extent of the
// scope and restores the previous setting on exit.
class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = Heap::allow_allocation(false);
  }

  ~AssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Saved allow-allocation state.
};

// Scoped guard: re-enables allocation inside an AssertNoAllocation scope,
// restoring the previous setting on exit.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = Heap::allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Saved allow-allocation state.
};

#else  // ndef DEBUG

// In release builds the guards compile away to empty objects.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};

#endif

// GCTracer collects and prints ONE line after each garbage
// collector invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  // RAII timer: accumulates the wall-clock time of one GC phase into the
  // owning tracer's scopes_ array.
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_COMPACT,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT((0 <= scope_) && (scope_ < kNumberOfScopes));
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;   // Tracer receiving the elapsed time.
    ScopeId scope_;      // Which phase bucket to charge.
    double start_time_;  // Timestamp (ms) taken in the constructor.
  };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  // Returns maximum GC pause.
  static int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  static int get_min_in_mutator() { return min_in_mutator_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of objects in the heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  intptr_t start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of free lists
  // before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in mutator that is time elapsed between end of the
  // previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  intptr_t promoted_objects_size_;

  // Maximum GC pause.
  static int max_gc_pause_;

  // Maximum size of objects alive after GC.
  static intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  static int min_in_mutator_;

  // Size of objects alive after last GC.
  static intptr_t alive_after_last_gc_;

  static double last_gc_end_timestamp_;
};


// Per-function cache of transcendental math results, keyed by the input
// double's bit pattern.  One cache instance exists per Type, lazily created.
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
  static const int kTranscendentalTypeBits = 3;
  STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  MUST_USE_RESULT static inline MaybeObject* Get(Type type, double input) {
    // Lazily instantiate the cache for this function on first use.
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers.  This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Direct-mapped lookup; on a miss, computes the value, allocates a heap
  // number for it and overwrites the cache slot.
  MUST_USE_RESULT inline MaybeObject* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    // Keys are compared as the double's raw 2x32-bit pattern, not by ==.
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      Counters::transcendental_cache_hit.Increment();
      return e.output;
    }
    double answer = Calculate(input);
    Counters::transcendental_cache_miss.Increment();
    Object* heap_number;
    // Allocation may fail; propagate the failure without touching the cache.
    { MaybeObject* maybe_heap_number = Heap::AllocateHeapNumber(answer);
      if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
    }
    elements_[hash].in[0] = c.integers[0];
    elements_[hash].in[1] = c.integers[1];
    elements_[hash].output = heap_number;
    return heap_number;
  }

  // Dispatches to the libm function selected by type_.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  static const int kCacheSize = 512;
  struct Element {
    uint32_t in[2];   // Input double as two raw 32-bit words.
    Object* output;   // Cached heap number, or stale after Clear().
  };
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  // Folds the two 32-bit halves of the input into a cache index.
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= static_cast<int32_t>(hash) >> 16;
    hash ^= static_cast<int32_t>(hash) >> 8;
    return (hash & (kCacheSize - 1));
  }

  static Address cache_array_address() {
    // Used to create an external reference.
    return reinterpret_cast<Address>(caches_);
  }

  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;
  // Inline implementation of the cache.
  friend class TranscendentalCacheStub;

  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;
};


// External strings table is a place where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  inline static void Verify();

  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space string are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained.  If NULL is returned the
  // object has no references.  Otherwise the address of the retained object
  // should be returned as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};


} }  // namespace v8::internal

#endif  // V8_HEAP_H_