v8globals.h revision 3fb3ca8c7ca439d408449a395897395c0faae8d1
1// Copyright 2011 the V8 project authors. All rights reserved. 2// Redistribution and use in source and binary forms, with or without 3// modification, are permitted provided that the following conditions are 4// met: 5// 6// * Redistributions of source code must retain the above copyright 7// notice, this list of conditions and the following disclaimer. 8// * Redistributions in binary form must reproduce the above 9// copyright notice, this list of conditions and the following 10// disclaimer in the documentation and/or other materials provided 11// with the distribution. 12// * Neither the name of Google Inc. nor the names of its 13// contributors may be used to endorse or promote products derived 14// from this software without specific prior written permission. 15// 16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28#ifndef V8_V8GLOBALS_H_ 29#define V8_V8GLOBALS_H_ 30 31#include "globals.h" 32 33namespace v8 { 34namespace internal { 35 36// This file contains constants and global declarations related to the 37// V8 system. 38 39// Mask for the sign bit in a smi. 
const intptr_t kSmiSignMask = kIntptrSignBit;  // Sign bit of an intptr_t-sized smi.

// Heap objects are aligned on their pointer size: every heap object address
// is a multiple of kObjectAlignment.
const int kObjectAlignmentBits = kPointerSizeLog2;
const intptr_t kObjectAlignment = 1 << kObjectAlignmentBits;
const intptr_t kObjectAlignmentMask = kObjectAlignment - 1;

// Desired alignment for pointers.
const intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;

// Desired alignment for maps.
#if V8_HOST_ARCH_64_BIT
// On 64-bit targets object alignment is sufficient for maps.
const intptr_t kMapAlignmentBits = kObjectAlignmentBits;
#else
// On 32-bit targets maps get three extra alignment bits
// (2^(kPointerSizeLog2 + 3) = 32-byte alignment).
const intptr_t kMapAlignmentBits = kObjectAlignmentBits + 3;
#endif
const intptr_t kMapAlignment = (1 << kMapAlignmentBits);
const intptr_t kMapAlignmentMask = kMapAlignment - 1;

// Desired alignment for generated code is 32 bytes (to improve cache line
// utilization).
const int kCodeAlignmentBits = 5;
const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;
const intptr_t kCodeAlignmentMask = kCodeAlignment - 1;

// Tag information for Failure.
// A Failure pointer carries kFailureTag (binary 11) in its low
// kFailureTagSize bits.
const int kFailureTag = 3;
const int kFailureTagSize = 2;
const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1;


// Zap-value: The value used for zapping dead objects.
// Should be a recognizable hex value tagged as a failure.
73#ifdef V8_HOST_ARCH_64_BIT 74const Address kZapValue = 75 reinterpret_cast<Address>(V8_UINT64_C(0xdeadbeedbeadbeef)); 76const Address kHandleZapValue = 77 reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddeaf)); 78const Address kFromSpaceZapValue = 79 reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdaf)); 80const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb); 81const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeef); 82#else 83const Address kZapValue = reinterpret_cast<Address>(0xdeadbeef); 84const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddeaf); 85const Address kFromSpaceZapValue = reinterpret_cast<Address>(0xbeefdaf); 86const uint32_t kSlotsZapValue = 0xbeefdeef; 87const uint32_t kDebugZapValue = 0xbadbaddb; 88#endif 89 90 91// Number of bits to represent the page size for paged spaces. The value of 13 92// gives 8K bytes per page. 93const int kPageSizeBits = 13; 94 95// On Intel architecture, cache line size is 64 bytes. 96// On ARM it may be less (32 bytes), but as far this constant is 97// used for aligning data, it doesn't hurt to align on a greater value. 98const int kProcessorCacheLineSize = 64; 99 100// Constants relevant to double precision floating point numbers. 101// If looking only at the top 32 bits, the QNaN mask is bits 19 to 30. 
// Mask selecting the quiet-NaN bits (bit 19, the quiet bit, through bit 30)
// of the high word of a double: 0xfff << 19 == 0x7FF80000.
const uint32_t kQuietNaNHighBitsMask = 0xfff << (51 - 32);


// -----------------------------------------------------------------------------
// Forward declarations for frequently used classes
// (sorted alphabetically)

class AccessorInfo;
class Allocation;
class Arguments;
class Assembler;
class AssertNoAllocation;
class AstVisitor;
class BreakableStatement;
class Code;
class CodeGenerator;
class CodeStub;
class Context;
class Debug;
class DebugInfo;
class Debugger;
class Descriptor;
class DescriptorArray;
class Deserializer;
class Expression;
class ExternalReference;
class FixedArray;
class Foreign;
// Declared here as well (defined elsewhere) because it is named by the
// default template arguments of ScopeInfo and SplayTree below.
class FreeStoreAllocationPolicy;
class FunctionEntry;
class FunctionLiteral;
class FunctionTemplateInfo;
template <typename T> class Handle;
class Heap;
class HeapObject;
class IC;
class InterceptorInfo;
class IterationStatement;
class JSArray;
class JSFunction;
class JSObject;
class LargeObjectSpace;
class LookupResult;
class MacroAssembler;
class Map;
class MapSpace;
class MarkCompactCollector;
class MaybeObject;
class MessageLocation;
class Mutex;
class NewSpace;
class NodeVisitor;
class NumberDictionary;
class Object;
class ObjectGroup;
class OldSpace;
class Property;
struct RegExpCompileData;
class RegExpCompiler;
class RegExpNode;
class RegExpTree;
class RegExpVisitor;
class RelocInfo;
class Scope;
template<class Allocator = FreeStoreAllocationPolicy> class ScopeInfo;
class Script;
class SerializedScopeInfo;
class Slot;
class Smi;
template <typename Config, class Allocator = FreeStoreAllocationPolicy>
    class SplayTree;
class Statement;
class String;
class StringDictionary;
class Struct;
class SwitchStatement;
class TickSample;
class Variable;
class VariableProxy;
class VirtualMemory;

// Callback invoked for each weak slot; returns true if the slot should be
// retained.
typedef bool (*WeakSlotCallback)(Object** pointer);

// As WeakSlotCallback, but also receives the heap the slot belongs to.
typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);

// -----------------------------------------------------------------------------
// Miscellaneous

// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive.
enum AllocationSpace {
  NEW_SPACE,          // Semispaces collected with copying collector.
  OLD_POINTER_SPACE,  // May contain pointers to new space.
  OLD_DATA_SPACE,     // Must not have pointers to new space.
  CODE_SPACE,         // No pointers to new space, marked executable.
  MAP_SPACE,          // Only and all map objects.
  CELL_SPACE,         // Only and all cell objects.
  LO_SPACE,           // Promoted large objects.

  FIRST_SPACE = NEW_SPACE,
  LAST_SPACE = LO_SPACE,
  FIRST_PAGED_SPACE = OLD_POINTER_SPACE,
  LAST_PAGED_SPACE = CELL_SPACE
};
// Number of bits needed to encode an AllocationSpace value (7 spaces fit
// in 3 bits).
const int kSpaceTagSize = 3;
const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;


// A flag that indicates whether objects should be pretenured when
// allocated (allocated directly into the old generation) or not
// (allocated in the young generation if the object size and type
// allows).
enum PretenureFlag { NOT_TENURED, TENURED };

enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

enum Executability { NOT_EXECUTABLE, EXECUTABLE };

enum VisitMode {
  VISIT_ALL,
  VISIT_ALL_IN_SCAVENGE,
  VISIT_ALL_IN_SWEEP_NEWSPACE,
  VISIT_ONLY_STRONG
};

// Flag indicating whether code is built into the VM (one of the natives files).
enum NativesFlag { NOT_NATIVES_CODE, NATIVES_CODE };


// A CodeDesc describes a buffer holding instructions and relocation
// information. The instructions start at the beginning of the buffer
// and grow forward, the relocation information starts at the end of
// the buffer and grows backward.
//
// |<--------------- buffer_size ---------------->|
// |<-- instr_size -->|        |<-- reloc_size -->|
// +==================+========+==================+
// |   instructions   |  free  |    reloc info    |
// +==================+========+==================+
//                    ^
//                    |
//                    buffer

struct CodeDesc {
  byte* buffer;
  int buffer_size;
  int instr_size;
  int reloc_size;
  Assembler* origin;
};


// Callback function on object slots, used for iterating heap object slots in
// HeapObjects, global pointers to heap objects, etc. The callback allows the
// callback function to change the value of the slot.
typedef void (*ObjectSlotCallback)(HeapObject** pointer);


// Callback function used for iterating objects in heap spaces,
// for example, scanning heap objects.
typedef int (*HeapObjectCallback)(HeapObject* obj);


// Callback function used for checking constraints when copying/relocating
// objects. Returns true if an object can be copied/relocated from its
// old_addr to a new_addr.
typedef bool (*ConstraintCallback)(Address new_addr, Address old_addr);


// Callback function on inline caches, used for iterating over inline caches
// in compiled code.
typedef void (*InlineCacheCallback)(Code* code, Address ic);


// State for inline cache call sites. Aliased as IC::State.
enum InlineCacheState {
  // Has never been executed.
  UNINITIALIZED,
  // Has been executed but monomorphic state has been delayed.
  PREMONOMORPHIC,
  // Has been executed and only one receiver type has been seen.
  MONOMORPHIC,
  // Like MONOMORPHIC but check failed due to prototype.
  MONOMORPHIC_PROTOTYPE_FAILURE,
  // Multiple receiver types have been seen.
  MEGAMORPHIC,
  // Special states for debug break or step in prepare stubs.
  DEBUG_BREAK,
  DEBUG_PREPARE_STEP_IN
};


// Kind of receiver check performed by a call IC stub.
enum CheckType {
  RECEIVER_MAP_CHECK,
  STRING_CHECK,
  NUMBER_CHECK,
  BOOLEAN_CHECK
};


// Whether a call site is inside a loop (used for call IC heuristics).
enum InLoopFlag {
  NOT_IN_LOOP,
  IN_LOOP
};


enum CallFunctionFlags {
  NO_CALL_FUNCTION_FLAGS = 0,
  // Receiver might implicitly be the global object. If it is, the
  // hole is passed to the call function stub.
  RECEIVER_MIGHT_BE_IMPLICIT = 1 << 0
};


// Where the inline-cache stub's map check lives.
enum InlineCacheHolderFlag {
  OWN_MAP,  // For fast properties objects.
  PROTOTYPE_MAP  // For slow properties objects (except GlobalObjects).
};


// Type of properties.
// Order of properties is significant.
// Must fit in the BitField PropertyDetails::TypeField.
// A copy of this is in mirror-debugger.js.
enum PropertyType {
  NORMAL = 0,  // only in slow mode
  FIELD = 1,  // only in fast mode
  CONSTANT_FUNCTION = 2,  // only in fast mode
  CALLBACKS = 3,
  HANDLER = 4,  // only in lookup results, not in descriptors
  INTERCEPTOR = 5,  // only in lookup results, not in descriptors
  MAP_TRANSITION = 6,  // only in fast mode
  EXTERNAL_ARRAY_TRANSITION = 7,
  CONSTANT_TRANSITION = 8,  // only in fast mode
  NULL_DESCRIPTOR = 9,  // only in fast mode
  // All properties before MAP_TRANSITION are real.
  FIRST_PHANTOM_PROPERTY_TYPE = MAP_TRANSITION,
  // There are no IC stubs for NULL_DESCRIPTORS. Therefore,
  // NULL_DESCRIPTOR can be used as the type flag for IC stubs for
  // nonexistent properties.
  NONEXISTENT = NULL_DESCRIPTOR
};


// Whether to remove map transitions and constant transitions from a
// DescriptorArray.
enum TransitionFlag {
  REMOVE_TRANSITIONS,
  KEEP_TRANSITIONS
};


// Union used for fast testing of specific double values.
union DoubleRepresentation {
  double value;
  // Raw 64-bit pattern of `value`. NOTE(review): reading `bits` after
  // storing `value` relies on union type punning; the compilers V8 targets
  // support this, but it is not guaranteed by the C++ standard.
  int64_t bits;
  // Intentionally non-explicit: callers rely on implicit conversion from
  // double.
  DoubleRepresentation(double x) { value = x; }
};


// Union used for customized checking of the IEEE double types
// inlined within v8 runtime, rather than going to the underlying
// platform headers and libraries
union IeeeDoubleLittleEndianArchType {
  double d;
  struct {
    unsigned int man_low :32;   // Low 32 bits of the 52-bit mantissa.
    unsigned int man_high :20;  // High 20 bits of the mantissa.
    unsigned int exp :11;       // Biased exponent.
    unsigned int sign :1;       // Sign bit.
  } bits;
};


// Big-endian counterpart: same fields in reverse storage order.
union IeeeDoubleBigEndianArchType {
  double d;
  struct {
    unsigned int sign :1;
    unsigned int exp :11;
    unsigned int man_high :20;
    unsigned int man_low :32;
  } bits;
};


// AccessorCallback: a pair of raw getter/setter callbacks plus an opaque
// data pointer passed back to them.
struct AccessorDescriptor {
  MaybeObject* (*getter)(Object* object, void* data);
  MaybeObject* (*setter)(JSObject* object, Object* value, void* data);
  void* data;
};


// Logging and profiling. A StateTag represents a possible state of
// the VM. The logger maintains a stack of these. Creating a VMState
// object enters a state by pushing on the stack, and destroying a
// VMState object leaves a state by popping the current state from the
// stack.

#define STATE_TAG_LIST(V) \
  V(JS)                   \
  V(GC)                   \
  V(COMPILER)             \
  V(OTHER)                \
  V(EXTERNAL)

enum StateTag {
#define DEF_STATE_TAG(name) name,
  STATE_TAG_LIST(DEF_STATE_TAG)
#undef DEF_STATE_TAG
  // Pseudo-types.
  state_tag_count
};


// -----------------------------------------------------------------------------
// Macros

// Testers for tagged values.

// True if the low kSmiTagSize bits of the pointer equal the smi tag.
#define HAS_SMI_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kSmiTagMask) == kSmiTag)

// True if the low kFailureTagSize bits of the pointer equal the failure tag.
#define HAS_FAILURE_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kFailureTagMask) == kFailureTag)

// OBJECT_POINTER_ALIGN returns the value aligned as a HeapObject pointer
// (rounded up to the next multiple of kObjectAlignment).
#define OBJECT_POINTER_ALIGN(value)                             \
  (((value) + kObjectAlignmentMask) & ~kObjectAlignmentMask)

// POINTER_SIZE_ALIGN returns the value aligned as a pointer.
#define POINTER_SIZE_ALIGN(value)                               \
  (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)

// MAP_POINTER_ALIGN returns the value aligned as a map pointer.
#define MAP_POINTER_ALIGN(value)                                \
  (((value) + kMapAlignmentMask) & ~kMapAlignmentMask)

// CODE_POINTER_ALIGN returns the value aligned as a generated code segment.
#define CODE_POINTER_ALIGN(value)                               \
  (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)

// Support for tracking C++ memory allocation. Insert TRACK_MEMORY("Fisk")
// inside a C++ class and new and delete will be overloaded so logging is
// performed.
// This file is included before log.h, so we use direct calls to
// the Logger rather than the LOG macro.
#ifdef DEBUG
#define TRACK_MEMORY(name) \
  void* operator new(size_t size) { \
    void* result = ::operator new(size); \
    Logger::NewEventStatic(name, result, size); \
    return result; \
  } \
  void operator delete(void* object) { \
    Logger::DeleteEventStatic(name, object); \
    ::operator delete(object); \
  }
#else
#define TRACK_MEMORY(name)
#endif


// Feature flags bit positions. They are mostly based on the CPUID spec.
// (We assign CPUID itself to one of the currently reserved bits --
// feel free to change this if needed.)
// On X86/X64, values below 32 are bits in EDX, values above 32 are bits in ECX.
// Bit positions are only meaningful per target architecture: values from
// different architectures deliberately overlap (e.g. VFP3 == FPU == 1,
// SAHF == 0 alongside SSE3 == 32).
enum CpuFeature { SSE4_1 = 32 + 19,  // x86
                  SSE3 = 32 + 0,     // x86
                  SSE2 = 26,   // x86
                  CMOV = 15,   // x86
                  RDTSC = 4,   // x86
                  CPUID = 10,  // x86
                  VFP3 = 1,    // ARM
                  ARMv7 = 2,   // ARM
                  SAHF = 0,    // x86
                  FPU = 1};    // MIPS

// The Strict Mode (ECMA-262 5th edition, 4.2.2).
enum StrictModeFlag {
  kNonStrictMode,
  kStrictMode,
  // This value is never used, but is needed to prevent GCC 4.5 from failing
  // to compile when we assert that a flag is either kNonStrictMode or
  // kStrictMode.
  kInvalidStrictFlag
};


// Used to specify if a macro instruction must perform a smi check on tagged
// values.
enum SmiCheckType {
  DONT_DO_SMI_CHECK = 0,
  DO_SMI_CHECK
};


// Used to specify whether a receiver is implicitly or explicitly
// provided to a call.
enum CallKind {
  CALL_AS_METHOD = 0,
  CALL_AS_FUNCTION
};


// Upper/lower halves of the "hole" NaN bit pattern (0x7FFFFFFFFFFFFFFF) and
// of the smallest upper word that denotes NaN/Infinity (0x7FF00000).
// NOTE(review): `static` is redundant on these three (namespace-scope const
// already has internal linkage in C++); kept as-is for byte-compatibility.
static const uint32_t kHoleNanUpper32 = 0x7FFFFFFF;
static const uint32_t kHoleNanLower32 = 0xFFFFFFFF;
static const uint32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000;

// Full 64-bit hole-NaN pattern assembled from the halves above.
const uint64_t kHoleNanInt64 =
    (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;
// Largest bit pattern (as uint64) that is not a NaN: the +Infinity pattern.
const uint64_t kLastNonNaNInt64 =
    (static_cast<uint64_t>(kNaNOrInfinityLowerBoundUpper32) << 32);

} }  // namespace v8::internal

#endif  // V8_V8GLOBALS_H_