1// Copyright 2012 the V8 project authors. All rights reserved. 2// Use of this source code is governed by a BSD-style license that can be 3// found in the LICENSE file. 4// 5// Review notes: 6// 7// - The use of macros in these inline functions may seem superfluous 8// but it is absolutely needed to make sure gcc generates optimal 9// code. gcc is not happy when attempting to inline too deep. 10// 11 12#ifndef V8_OBJECTS_INL_H_ 13#define V8_OBJECTS_INL_H_ 14 15#include "src/base/atomicops.h" 16#include "src/base/bits.h" 17#include "src/contexts.h" 18#include "src/conversions-inl.h" 19#include "src/elements.h" 20#include "src/factory.h" 21#include "src/field-index-inl.h" 22#include "src/heap/heap-inl.h" 23#include "src/heap/heap.h" 24#include "src/heap/incremental-marking.h" 25#include "src/heap/objects-visiting.h" 26#include "src/heap/spaces.h" 27#include "src/heap/store-buffer.h" 28#include "src/isolate.h" 29#include "src/lookup.h" 30#include "src/objects.h" 31#include "src/property.h" 32#include "src/prototype.h" 33#include "src/transitions-inl.h" 34#include "src/type-feedback-vector-inl.h" 35#include "src/v8memory.h" 36 37namespace v8 { 38namespace internal { 39 40PropertyDetails::PropertyDetails(Smi* smi) { 41 value_ = smi->value(); 42} 43 44 45Smi* PropertyDetails::AsSmi() const { 46 // Ensure the upper 2 bits have the same value by sign extending it. This is 47 // necessary to be able to use the 31st bit of the property details. 48 int value = value_ << 1; 49 return Smi::FromInt(value >> 1); 50} 51 52 53PropertyDetails PropertyDetails::AsDeleted() const { 54 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1)); 55 return PropertyDetails(smi); 56} 57 58 59#define TYPE_CHECKER(type, instancetype) \ 60 bool Object::Is##type() const { \ 61 return Object::IsHeapObject() && \ 62 HeapObject::cast(this)->map()->instance_type() == instancetype; \ 63 } 64 65 66#define CAST_ACCESSOR(type) \ 67 type* type::cast(Object* object) { \ 68 SLOW_DCHECK(object->Is##type()); \ 69 return reinterpret_cast<type*>(object); \ 70 } \ 71 const type* type::cast(const Object* object) { \ 72 SLOW_DCHECK(object->Is##type()); \ 73 return reinterpret_cast<const type*>(object); \ 74 } 75 76 77#define INT_ACCESSORS(holder, name, offset) \ 78 int holder::name() const { return READ_INT_FIELD(this, offset); } \ 79 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } 80 81 82#define ACCESSORS(holder, name, type, offset) \ 83 type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \ 84 void holder::set_##name(type* value, WriteBarrierMode mode) { \ 85 WRITE_FIELD(this, offset, value); \ 86 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \ 87 } 88 89 90// Getter that returns a tagged Smi and setter that writes a tagged Smi. 91#define ACCESSORS_TO_SMI(holder, name, offset) \ 92 Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \ 93 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \ 94 WRITE_FIELD(this, offset, value); \ 95 } 96 97 98// Getter that returns a Smi as an int and writes an int as a Smi. 99#define SMI_ACCESSORS(holder, name, offset) \ 100 int holder::name() const { \ 101 Object* value = READ_FIELD(this, offset); \ 102 return Smi::cast(value)->value(); \ 103 } \ 104 void holder::set_##name(int value) { \ 105 WRITE_FIELD(this, offset, Smi::FromInt(value)); \ 106 } 107 108#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \ 109 int holder::synchronized_##name() const { \ 110 Object* value = ACQUIRE_READ_FIELD(this, offset); \ 111 return Smi::cast(value)->value(); \ 112 } \ 113 void holder::synchronized_set_##name(int value) { \ 114 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \ 115 } 116 117#define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \ 118 int holder::nobarrier_##name() const { \ 119 Object* value = NOBARRIER_READ_FIELD(this, offset); \ 120 return Smi::cast(value)->value(); \ 121 } \ 122 void holder::nobarrier_set_##name(int value) { \ 123 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \ 124 } 125 126#define BOOL_GETTER(holder, field, name, offset) \ 127 bool holder::name() const { \ 128 return BooleanBit::get(field(), offset); \ 129 } \ 130 131 132#define BOOL_ACCESSORS(holder, field, name, offset) \ 133 bool holder::name() const { \ 134 return BooleanBit::get(field(), offset); \ 135 } \ 136 void holder::set_##name(bool value) { \ 137 set_##field(BooleanBit::set(field(), offset, value)); \ 138 } 139 140 141bool Object::IsFixedArrayBase() const { 142 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() || 143 IsFixedTypedArrayBase() || IsExternalArray(); 144} 145 146 147// External objects are not extensible, so the map check is enough. 148bool Object::IsExternal() const { 149 return Object::IsHeapObject() && 150 HeapObject::cast(this)->map() == 151 HeapObject::cast(this)->GetHeap()->external_map(); 152} 153 154 155bool Object::IsAccessorInfo() const { 156 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo(); 157} 158 159 160bool Object::IsSmi() const { 161 return HAS_SMI_TAG(this); 162} 163 164 165bool Object::IsHeapObject() const { 166 return Internals::HasHeapObjectTag(this); 167} 168 169 170TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) 171TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE) 172TYPE_CHECKER(Symbol, SYMBOL_TYPE) 173 174 175bool Object::IsString() const { 176 return Object::IsHeapObject() 177 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE; 178} 179 180 181bool Object::IsName() const { 182 return IsString() || IsSymbol(); 183} 184 185 186bool Object::IsUniqueName() const { 187 return IsInternalizedString() || IsSymbol(); 188} 189 190 191bool Object::IsSpecObject() const { 192 return Object::IsHeapObject() 193 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE; 194} 195 196 197bool Object::IsSpecFunction() const { 198 if (!Object::IsHeapObject()) return false; 199 InstanceType type = HeapObject::cast(this)->map()->instance_type(); 200 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE; 201} 202 203 204bool Object::IsTemplateInfo() const { 205 return IsObjectTemplateInfo() || IsFunctionTemplateInfo(); 206} 207 208 209bool Object::IsInternalizedString() const { 210 if (!this->IsHeapObject()) return false; 211 uint32_t type = HeapObject::cast(this)->map()->instance_type(); 212 STATIC_ASSERT(kNotInternalizedTag != 0); 213 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) == 214 (kStringTag | kInternalizedTag); 215} 216 217 218bool Object::IsConsString() const { 219 if (!IsString()) return false; 220 return StringShape(String::cast(this)).IsCons(); 221} 222 223 224bool Object::IsSlicedString() const { 225 if (!IsString()) return false; 226 return StringShape(String::cast(this)).IsSliced(); 227} 228 229 230bool Object::IsSeqString() const { 231 if (!IsString()) return false; 232 return StringShape(String::cast(this)).IsSequential(); 233} 234 235 236bool Object::IsSeqOneByteString() const { 237 if (!IsString()) return false; 238 return StringShape(String::cast(this)).IsSequential() && 239 String::cast(this)->IsOneByteRepresentation(); 240} 241 242 243bool Object::IsSeqTwoByteString() const { 244 if (!IsString()) return false; 245 return StringShape(String::cast(this)).IsSequential() && 246 String::cast(this)->IsTwoByteRepresentation(); 247} 248 249 250bool Object::IsExternalString() const { 251 if (!IsString()) return false; 252 return StringShape(String::cast(this)).IsExternal(); 253} 254 255 256bool Object::IsExternalOneByteString() const { 257 if (!IsString()) return false; 258 return StringShape(String::cast(this)).IsExternal() && 259 String::cast(this)->IsOneByteRepresentation(); 260} 261 262 263bool Object::IsExternalTwoByteString() const { 264 if (!IsString()) return false; 265 return StringShape(String::cast(this)).IsExternal() && 266 String::cast(this)->IsTwoByteRepresentation(); 267} 268 269 270bool Object::HasValidElements() { 271 // Dictionary is covered under FixedArray. 272 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() || 273 IsFixedTypedArrayBase(); 274} 275 276 277Handle<Object> Object::NewStorageFor(Isolate* isolate, 278 Handle<Object> object, 279 Representation representation) { 280 if (representation.IsSmi() && object->IsUninitialized()) { 281 return handle(Smi::FromInt(0), isolate); 282 } 283 if (!representation.IsDouble()) return object; 284 double value; 285 if (object->IsUninitialized()) { 286 value = 0; 287 } else if (object->IsMutableHeapNumber()) { 288 value = HeapNumber::cast(*object)->value(); 289 } else { 290 value = object->Number(); 291 } 292 return isolate->factory()->NewHeapNumber(value, MUTABLE); 293} 294 295 296Handle<Object> Object::WrapForRead(Isolate* isolate, 297 Handle<Object> object, 298 Representation representation) { 299 DCHECK(!object->IsUninitialized()); 300 if (!representation.IsDouble()) { 301 DCHECK(object->FitsRepresentation(representation)); 302 return object; 303 } 304 return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value()); 305} 306 307 308StringShape::StringShape(const String* str) 309 : type_(str->map()->instance_type()) { 310 set_valid(); 311 DCHECK((type_ & kIsNotStringMask) == kStringTag); 312} 313 314 315StringShape::StringShape(Map* map) 316 : type_(map->instance_type()) { 317 set_valid(); 318 DCHECK((type_ & kIsNotStringMask) == kStringTag); 319} 320 321 322StringShape::StringShape(InstanceType t) 323 : type_(static_cast<uint32_t>(t)) { 324 set_valid(); 325 DCHECK((type_ & kIsNotStringMask) == kStringTag); 326} 327 328 329bool StringShape::IsInternalized() { 330 DCHECK(valid()); 331 STATIC_ASSERT(kNotInternalizedTag != 0); 332 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) == 333 (kStringTag | kInternalizedTag); 334} 335 336 337bool String::IsOneByteRepresentation() const { 338 uint32_t type = map()->instance_type(); 339 return (type & kStringEncodingMask) == kOneByteStringTag; 340} 341 342 343bool String::IsTwoByteRepresentation() const { 344 uint32_t type = map()->instance_type(); 345 return (type & kStringEncodingMask) == kTwoByteStringTag; 346} 347 348 349bool String::IsOneByteRepresentationUnderneath() { 350 uint32_t type = map()->instance_type(); 351 STATIC_ASSERT(kIsIndirectStringTag != 0); 352 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0); 353 DCHECK(IsFlat()); 354 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) { 355 case kOneByteStringTag: 356 return true; 357 case kTwoByteStringTag: 358 return false; 359 default: // Cons or sliced string. Need to go deeper. 360 return GetUnderlying()->IsOneByteRepresentation(); 361 } 362} 363 364 365bool String::IsTwoByteRepresentationUnderneath() { 366 uint32_t type = map()->instance_type(); 367 STATIC_ASSERT(kIsIndirectStringTag != 0); 368 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0); 369 DCHECK(IsFlat()); 370 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) { 371 case kOneByteStringTag: 372 return false; 373 case kTwoByteStringTag: 374 return true; 375 default: // Cons or sliced string. Need to go deeper. 376 return GetUnderlying()->IsTwoByteRepresentation(); 377 } 378} 379 380 381bool String::HasOnlyOneByteChars() { 382 uint32_t type = map()->instance_type(); 383 return (type & kOneByteDataHintMask) == kOneByteDataHintTag || 384 IsOneByteRepresentation(); 385} 386 387 388bool StringShape::IsCons() { 389 return (type_ & kStringRepresentationMask) == kConsStringTag; 390} 391 392 393bool StringShape::IsSliced() { 394 return (type_ & kStringRepresentationMask) == kSlicedStringTag; 395} 396 397 398bool StringShape::IsIndirect() { 399 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag; 400} 401 402 403bool StringShape::IsExternal() { 404 return (type_ & kStringRepresentationMask) == kExternalStringTag; 405} 406 407 408bool StringShape::IsSequential() { 409 return (type_ & kStringRepresentationMask) == kSeqStringTag; 410} 411 412 413StringRepresentationTag StringShape::representation_tag() { 414 uint32_t tag = (type_ & kStringRepresentationMask); 415 return static_cast<StringRepresentationTag>(tag); 416} 417 418 419uint32_t StringShape::encoding_tag() { 420 return type_ & kStringEncodingMask; 421} 422 423 424uint32_t StringShape::full_representation_tag() { 425 return (type_ & (kStringRepresentationMask | kStringEncodingMask)); 426} 427 428 429STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) == 430 Internals::kFullStringRepresentationMask); 431 432STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) == 433 Internals::kStringEncodingMask); 434 435 436bool StringShape::IsSequentialOneByte() { 437 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag); 438} 439 440 441bool StringShape::IsSequentialTwoByte() { 442 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag); 443} 444 445 446bool StringShape::IsExternalOneByte() { 447 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag); 448} 449 450 451STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) == 452 Internals::kExternalOneByteRepresentationTag); 453 454STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag); 455 456 457bool StringShape::IsExternalTwoByte() { 458 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag); 459} 460 461 462STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) == 463 Internals::kExternalTwoByteRepresentationTag); 464 465STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag); 466 467uc32 FlatStringReader::Get(int index) { 468 DCHECK(0 <= index && index <= length_); 469 if (is_one_byte_) { 470 return static_cast<const byte*>(start_)[index]; 471 } else { 472 return static_cast<const uc16*>(start_)[index]; 473 } 474} 475 476 477Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) { 478 return key->AsHandle(isolate); 479} 480 481 482Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) { 483 return key->AsHandle(isolate); 484} 485 486 487Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate, 488 HashTableKey* key) { 489 return key->AsHandle(isolate); 490} 491 492 493Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate, 494 HashTableKey* key) { 495 return key->AsHandle(isolate); 496} 497 498template <typename Char> 499class SequentialStringKey : public HashTableKey { 500 public: 501 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed) 502 : string_(string), hash_field_(0), seed_(seed) { } 503 504 virtual uint32_t Hash() OVERRIDE { 505 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(), 506 string_.length(), 507 seed_); 508 509 uint32_t result = hash_field_ >> String::kHashShift; 510 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed. 511 return result; 512 } 513 514 515 virtual uint32_t HashForObject(Object* other) OVERRIDE { 516 return String::cast(other)->Hash(); 517 } 518 519 Vector<const Char> string_; 520 uint32_t hash_field_; 521 uint32_t seed_; 522}; 523 524 525class OneByteStringKey : public SequentialStringKey<uint8_t> { 526 public: 527 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed) 528 : SequentialStringKey<uint8_t>(str, seed) { } 529 530 virtual bool IsMatch(Object* string) OVERRIDE { 531 return String::cast(string)->IsOneByteEqualTo(string_); 532 } 533 534 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE; 535}; 536 537 538class SeqOneByteSubStringKey : public HashTableKey { 539 public: 540 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length) 541 : string_(string), from_(from), length_(length) { 542 DCHECK(string_->IsSeqOneByteString()); 543 } 544 545 virtual uint32_t Hash() OVERRIDE { 546 DCHECK(length_ >= 0); 547 DCHECK(from_ + length_ <= string_->length()); 548 const uint8_t* chars = string_->GetChars() + from_; 549 hash_field_ = StringHasher::HashSequentialString( 550 chars, length_, string_->GetHeap()->HashSeed()); 551 uint32_t result = hash_field_ >> String::kHashShift; 552 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed. 553 return result; 554 } 555 556 virtual uint32_t HashForObject(Object* other) OVERRIDE { 557 return String::cast(other)->Hash(); 558 } 559 560 virtual bool IsMatch(Object* string) OVERRIDE; 561 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE; 562 563 private: 564 Handle<SeqOneByteString> string_; 565 int from_; 566 int length_; 567 uint32_t hash_field_; 568}; 569 570 571class TwoByteStringKey : public SequentialStringKey<uc16> { 572 public: 573 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed) 574 : SequentialStringKey<uc16>(str, seed) { } 575 576 virtual bool IsMatch(Object* string) OVERRIDE { 577 return String::cast(string)->IsTwoByteEqualTo(string_); 578 } 579 580 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE; 581}; 582 583 584// Utf8StringKey carries a vector of chars as key. 585class Utf8StringKey : public HashTableKey { 586 public: 587 explicit Utf8StringKey(Vector<const char> string, uint32_t seed) 588 : string_(string), hash_field_(0), seed_(seed) { } 589 590 virtual bool IsMatch(Object* string) OVERRIDE { 591 return String::cast(string)->IsUtf8EqualTo(string_); 592 } 593 594 virtual uint32_t Hash() OVERRIDE { 595 if (hash_field_ != 0) return hash_field_ >> String::kHashShift; 596 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_); 597 uint32_t result = hash_field_ >> String::kHashShift; 598 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed. 599 return result; 600 } 601 602 virtual uint32_t HashForObject(Object* other) OVERRIDE { 603 return String::cast(other)->Hash(); 604 } 605 606 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE { 607 if (hash_field_ == 0) Hash(); 608 return isolate->factory()->NewInternalizedStringFromUtf8( 609 string_, chars_, hash_field_); 610 } 611 612 Vector<const char> string_; 613 uint32_t hash_field_; 614 int chars_; // Caches the number of characters when computing the hash code. 615 uint32_t seed_; 616}; 617 618 619bool Object::IsNumber() const { 620 return IsSmi() || IsHeapNumber(); 621} 622 623 624TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE) 625TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE) 626 627 628bool Object::IsFiller() const { 629 if (!Object::IsHeapObject()) return false; 630 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type(); 631 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE; 632} 633 634 635bool Object::IsExternalArray() const { 636 if (!Object::IsHeapObject()) 637 return false; 638 InstanceType instance_type = 639 HeapObject::cast(this)->map()->instance_type(); 640 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE && 641 instance_type <= LAST_EXTERNAL_ARRAY_TYPE); 642} 643 644 645#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \ 646 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \ 647 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE) 648 649TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER) 650#undef TYPED_ARRAY_TYPE_CHECKER 651 652 653bool Object::IsFixedTypedArrayBase() const { 654 if (!Object::IsHeapObject()) return false; 655 656 InstanceType instance_type = 657 HeapObject::cast(this)->map()->instance_type(); 658 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE && 659 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE); 660} 661 662 663bool Object::IsJSReceiver() const { 664 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 665 return IsHeapObject() && 666 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE; 667} 668 669 670bool Object::IsJSObject() const { 671 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE); 672 return IsHeapObject() && 673 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE; 674} 675 676 677bool Object::IsJSProxy() const { 678 if (!Object::IsHeapObject()) return false; 679 return HeapObject::cast(this)->map()->IsJSProxyMap(); 680} 681 682 683TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE) 684TYPE_CHECKER(JSSet, JS_SET_TYPE) 685TYPE_CHECKER(JSMap, JS_MAP_TYPE) 686TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE) 687TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE) 688TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE) 689TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE) 690TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE) 691TYPE_CHECKER(Map, MAP_TYPE) 692TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE) 693TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE) 694TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE) 695 696 697bool Object::IsJSWeakCollection() const { 698 return IsJSWeakMap() || IsJSWeakSet(); 699} 700 701 702bool Object::IsDescriptorArray() const { 703 return IsFixedArray(); 704} 705 706 707bool Object::IsTransitionArray() const { 708 return IsFixedArray(); 709} 710 711 712bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); } 713 714 715bool Object::IsDeoptimizationInputData() const { 716 // Must be a fixed array. 717 if (!IsFixedArray()) return false; 718 719 // There's no sure way to detect the difference between a fixed array and 720 // a deoptimization data array. Since this is used for asserts we can 721 // check that the length is zero or else the fixed size plus a multiple of 722 // the entry size. 723 int length = FixedArray::cast(this)->length(); 724 if (length == 0) return true; 725 726 length -= DeoptimizationInputData::kFirstDeoptEntryIndex; 727 return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0; 728} 729 730 731bool Object::IsDeoptimizationOutputData() const { 732 if (!IsFixedArray()) return false; 733 // There's actually no way to see the difference between a fixed array and 734 // a deoptimization data array. Since this is used for asserts we can check 735 // that the length is plausible though. 736 if (FixedArray::cast(this)->length() % 2 != 0) return false; 737 return true; 738} 739 740 741bool Object::IsDependentCode() const { 742 if (!IsFixedArray()) return false; 743 // There's actually no way to see the difference between a fixed array and 744 // a dependent codes array. 745 return true; 746} 747 748 749bool Object::IsContext() const { 750 if (!Object::IsHeapObject()) return false; 751 Map* map = HeapObject::cast(this)->map(); 752 Heap* heap = map->GetHeap(); 753 return (map == heap->function_context_map() || 754 map == heap->catch_context_map() || 755 map == heap->with_context_map() || 756 map == heap->native_context_map() || 757 map == heap->block_context_map() || 758 map == heap->module_context_map() || 759 map == heap->global_context_map()); 760} 761 762 763bool Object::IsNativeContext() const { 764 return Object::IsHeapObject() && 765 HeapObject::cast(this)->map() == 766 HeapObject::cast(this)->GetHeap()->native_context_map(); 767} 768 769 770bool Object::IsScopeInfo() const { 771 return Object::IsHeapObject() && 772 HeapObject::cast(this)->map() == 773 HeapObject::cast(this)->GetHeap()->scope_info_map(); 774} 775 776 777TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE) 778 779 780template <> inline bool Is<JSFunction>(Object* obj) { 781 return obj->IsJSFunction(); 782} 783 784 785TYPE_CHECKER(Code, CODE_TYPE) 786TYPE_CHECKER(Oddball, ODDBALL_TYPE) 787TYPE_CHECKER(Cell, CELL_TYPE) 788TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE) 789TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE) 790TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE) 791TYPE_CHECKER(JSModule, JS_MODULE_TYPE) 792TYPE_CHECKER(JSValue, JS_VALUE_TYPE) 793TYPE_CHECKER(JSDate, JS_DATE_TYPE) 794TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE) 795 796 797bool Object::IsStringWrapper() const { 798 return IsJSValue() && JSValue::cast(this)->value()->IsString(); 799} 800 801 802TYPE_CHECKER(Foreign, FOREIGN_TYPE) 803 804 805bool Object::IsBoolean() const { 806 return IsOddball() && 807 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0); 808} 809 810 811TYPE_CHECKER(JSArray, JS_ARRAY_TYPE) 812TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE) 813TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE) 814TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE) 815 816 817bool Object::IsJSArrayBufferView() const { 818 return IsJSDataView() || IsJSTypedArray(); 819} 820 821 822TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE) 823 824 825template <> inline bool Is<JSArray>(Object* obj) { 826 return obj->IsJSArray(); 827} 828 829 830bool Object::IsHashTable() const { 831 return Object::IsHeapObject() && 832 HeapObject::cast(this)->map() == 833 HeapObject::cast(this)->GetHeap()->hash_table_map(); 834} 835 836 837bool Object::IsWeakHashTable() const { 838 return IsHashTable(); 839} 840 841 842bool Object::IsDictionary() const { 843 return IsHashTable() && 844 this != HeapObject::cast(this)->GetHeap()->string_table(); 845} 846 847 848bool Object::IsNameDictionary() const { 849 return IsDictionary(); 850} 851 852 853bool Object::IsSeededNumberDictionary() const { 854 return IsDictionary(); 855} 856 857 858bool Object::IsUnseededNumberDictionary() const { 859 return IsDictionary(); 860} 861 862 863bool Object::IsStringTable() const { 864 return IsHashTable(); 865} 866 867 868bool Object::IsJSFunctionResultCache() const { 869 if (!IsFixedArray()) return false; 870 const FixedArray* self = FixedArray::cast(this); 871 int length = self->length(); 872 if (length < JSFunctionResultCache::kEntriesIndex) return false; 873 if ((length - JSFunctionResultCache::kEntriesIndex) 874 % JSFunctionResultCache::kEntrySize != 0) { 875 return false; 876 } 877#ifdef VERIFY_HEAP 878 if (FLAG_verify_heap) { 879 // TODO(svenpanne) We use const_cast here and below to break our dependency 880 // cycle between the predicates and the verifiers. This can be removed when 881 // the verifiers are const-correct, too. 882 reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))-> 883 JSFunctionResultCacheVerify(); 884 } 885#endif 886 return true; 887} 888 889 890bool Object::IsNormalizedMapCache() const { 891 return NormalizedMapCache::IsNormalizedMapCache(this); 892} 893 894 895int NormalizedMapCache::GetIndex(Handle<Map> map) { 896 return map->Hash() % NormalizedMapCache::kEntries; 897} 898 899 900bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) { 901 if (!obj->IsFixedArray()) return false; 902 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) { 903 return false; 904 } 905#ifdef VERIFY_HEAP 906 if (FLAG_verify_heap) { 907 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))-> 908 NormalizedMapCacheVerify(); 909 } 910#endif 911 return true; 912} 913 914 915bool Object::IsCompilationCacheTable() const { 916 return IsHashTable(); 917} 918 919 920bool Object::IsCodeCacheHashTable() const { 921 return IsHashTable(); 922} 923 924 925bool Object::IsPolymorphicCodeCacheHashTable() const { 926 return IsHashTable(); 927} 928 929 930bool Object::IsMapCache() const { 931 return IsHashTable(); 932} 933 934 935bool Object::IsObjectHashTable() const { 936 return IsHashTable(); 937} 938 939 940bool Object::IsOrderedHashTable() const { 941 return IsHeapObject() && 942 HeapObject::cast(this)->map() == 943 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map(); 944} 945 946 947bool Object::IsOrderedHashSet() const { 948 return IsOrderedHashTable(); 949} 950 951 952bool Object::IsOrderedHashMap() const { 953 return IsOrderedHashTable(); 954} 955 956 957bool Object::IsPrimitive() const { 958 return IsOddball() || IsNumber() || IsString(); 959} 960 961 962bool Object::IsJSGlobalProxy() const { 963 bool result = IsHeapObject() && 964 (HeapObject::cast(this)->map()->instance_type() == 965 JS_GLOBAL_PROXY_TYPE); 966 DCHECK(!result || 967 HeapObject::cast(this)->map()->is_access_check_needed()); 968 return result; 969} 970 971 972bool Object::IsGlobalObject() const { 973 if (!IsHeapObject()) return false; 974 975 InstanceType type = HeapObject::cast(this)->map()->instance_type(); 976 return type == JS_GLOBAL_OBJECT_TYPE || 977 type == JS_BUILTINS_OBJECT_TYPE; 978} 979 980 981TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE) 982TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE) 983 984 985bool Object::IsUndetectableObject() const { 986 return IsHeapObject() 987 && HeapObject::cast(this)->map()->is_undetectable(); 988} 989 990 991bool Object::IsAccessCheckNeeded() const { 992 if (!IsHeapObject()) return false; 993 if (IsJSGlobalProxy()) { 994 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this); 995 GlobalObject* global = proxy->GetIsolate()->context()->global_object(); 996 return proxy->IsDetachedFrom(global); 997 } 998 return HeapObject::cast(this)->map()->is_access_check_needed(); 999} 1000 1001 1002bool Object::IsStruct() const { 1003 if (!IsHeapObject()) return false; 1004 switch (HeapObject::cast(this)->map()->instance_type()) { 1005#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true; 1006 STRUCT_LIST(MAKE_STRUCT_CASE) 1007#undef MAKE_STRUCT_CASE 1008 default: return false; 1009 } 1010} 1011 1012 1013#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \ 1014 bool Object::Is##Name() const { \ 1015 return Object::IsHeapObject() \ 1016 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \ 1017 } 1018 STRUCT_LIST(MAKE_STRUCT_PREDICATE) 1019#undef MAKE_STRUCT_PREDICATE 1020 1021 1022bool Object::IsUndefined() const { 1023 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined; 1024} 1025 1026 1027bool Object::IsNull() const { 1028 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull; 1029} 1030 1031 1032bool Object::IsTheHole() const { 1033 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole; 1034} 1035 1036 1037bool Object::IsException() const { 1038 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException; 1039} 1040 1041 1042bool Object::IsUninitialized() const { 1043 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized; 1044} 1045 1046 1047bool Object::IsTrue() const { 1048 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue; 1049} 1050 1051 1052bool Object::IsFalse() const { 1053 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse; 1054} 1055 1056 1057bool Object::IsArgumentsMarker() const { 1058 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker; 1059} 1060 1061 1062double Object::Number() { 1063 DCHECK(IsNumber()); 1064 return IsSmi() 1065 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value()) 1066 : reinterpret_cast<HeapNumber*>(this)->value(); 1067} 1068 1069 1070bool Object::IsNaN() const { 1071 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value()); 1072} 1073 1074 1075bool Object::IsMinusZero() const { 1076 return this->IsHeapNumber() && 1077 i::IsMinusZero(HeapNumber::cast(this)->value()); 1078} 1079 1080 1081MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) { 1082 if (object->IsSmi()) return Handle<Smi>::cast(object); 1083 if (object->IsHeapNumber()) { 1084 double value = Handle<HeapNumber>::cast(object)->value(); 1085 int int_value = FastD2I(value); 1086 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) { 1087 return handle(Smi::FromInt(int_value), isolate); 1088 } 1089 } 1090 return Handle<Smi>(); 1091} 1092 1093 1094MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate, 1095 Handle<Object> object) { 1096 return ToObject( 1097 isolate, object, handle(isolate->context()->native_context(), isolate)); 1098} 1099 1100 1101bool Object::HasSpecificClassOf(String* name) { 1102 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name); 1103} 1104 1105 1106MaybeHandle<Object> Object::GetProperty(Handle<Object> object, 1107 Handle<Name> name) { 1108 LookupIterator it(object, name); 1109 return GetProperty(&it); 1110} 1111 1112 1113MaybeHandle<Object> Object::GetElement(Isolate* isolate, 1114 Handle<Object> object, 1115 uint32_t index) { 1116 // GetElement can trigger a getter which can cause allocation. 1117 // This was not always the case. This DCHECK is here to catch 1118 // leftover incorrect uses. 1119 DCHECK(AllowHeapAllocation::IsAllowed()); 1120 return Object::GetElementWithReceiver(isolate, object, object, index); 1121} 1122 1123 1124MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object, 1125 Handle<Name> name) { 1126 uint32_t index; 1127 Isolate* isolate = name->GetIsolate(); 1128 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index); 1129 return GetProperty(object, name); 1130} 1131 1132 1133MaybeHandle<Object> Object::GetProperty(Isolate* isolate, 1134 Handle<Object> object, 1135 const char* name) { 1136 Handle<String> str = isolate->factory()->InternalizeUtf8String(name); 1137 DCHECK(!str.is_null()); 1138#ifdef DEBUG 1139 uint32_t index; // Assert that the name is not an array index. 1140 DCHECK(!str->AsArrayIndex(&index)); 1141#endif // DEBUG 1142 return GetProperty(object, str); 1143} 1144 1145 1146MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy, 1147 Handle<Object> receiver, 1148 uint32_t index) { 1149 return GetPropertyWithHandler( 1150 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index)); 1151} 1152 1153 1154MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy, 1155 Handle<JSReceiver> receiver, 1156 uint32_t index, 1157 Handle<Object> value, 1158 StrictMode strict_mode) { 1159 Isolate* isolate = proxy->GetIsolate(); 1160 Handle<String> name = isolate->factory()->Uint32ToString(index); 1161 return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode); 1162} 1163 1164 1165Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, 1166 uint32_t index) { 1167 Isolate* isolate = proxy->GetIsolate(); 1168 Handle<String> name = isolate->factory()->Uint32ToString(index); 1169 return HasPropertyWithHandler(proxy, name); 1170} 1171 1172 1173#define FIELD_ADDR(p, offset) \ 1174 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) 1175 1176#define FIELD_ADDR_CONST(p, offset) \ 1177 (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag) 1178 1179#define READ_FIELD(p, offset) \ 1180 (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset))) 1181 1182#define ACQUIRE_READ_FIELD(p, offset) \ 1183 reinterpret_cast<Object*>(base::Acquire_Load( \ 1184 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset)))) 1185 1186#define NOBARRIER_READ_FIELD(p, offset) \ 1187 reinterpret_cast<Object*>(base::NoBarrier_Load( \ 1188 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset)))) 1189 1190#define WRITE_FIELD(p, offset, value) \ 1191 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) 1192 1193#define RELEASE_WRITE_FIELD(p, offset, value) \ 1194 base::Release_Store( \ 1195 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \ 1196 reinterpret_cast<base::AtomicWord>(value)); 1197 1198#define NOBARRIER_WRITE_FIELD(p, offset, value) \ 1199 base::NoBarrier_Store( \ 1200 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \ 1201 reinterpret_cast<base::AtomicWord>(value)); 1202 1203#define WRITE_BARRIER(heap, object, offset, value) \ 1204 heap->incremental_marking()->RecordWrite( \ 1205 object, HeapObject::RawField(object, offset), value); \ 1206 if (heap->InNewSpace(value)) { \ 1207 heap->RecordWrite(object->address(), offset); \ 1208 } 1209 1210#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \ 1211 if (mode == UPDATE_WRITE_BARRIER) { \ 1212 heap->incremental_marking()->RecordWrite( \ 1213 object, HeapObject::RawField(object, offset), value); \ 1214 if (heap->InNewSpace(value)) { \ 1215 heap->RecordWrite(object->address(), offset); \ 1216 } \ 1217 } 1218 1219#ifndef V8_TARGET_ARCH_MIPS 1220 #define READ_DOUBLE_FIELD(p, offset) \ 1221 (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset))) 1222#else // V8_TARGET_ARCH_MIPS 1223 // Prevent gcc from using load-double (mips ldc1) on (possibly) 1224 // non-64-bit aligned HeapNumber::value. 1225 static inline double read_double_field(const void* p, int offset) { 1226 union conversion { 1227 double d; 1228 uint32_t u[2]; 1229 } c; 1230 c.u[0] = (*reinterpret_cast<const uint32_t*>( 1231 FIELD_ADDR_CONST(p, offset))); 1232 c.u[1] = (*reinterpret_cast<const uint32_t*>( 1233 FIELD_ADDR_CONST(p, offset + 4))); 1234 return c.d; 1235 } 1236 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset) 1237#endif // V8_TARGET_ARCH_MIPS 1238 1239#ifndef V8_TARGET_ARCH_MIPS 1240 #define WRITE_DOUBLE_FIELD(p, offset, value) \ 1241 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) 1242#else // V8_TARGET_ARCH_MIPS 1243 // Prevent gcc from using store-double (mips sdc1) on (possibly) 1244 // non-64-bit aligned HeapNumber::value. 1245 static inline void write_double_field(void* p, int offset, 1246 double value) { 1247 union conversion { 1248 double d; 1249 uint32_t u[2]; 1250 } c; 1251 c.d = value; 1252 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0]; 1253 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1]; 1254 } 1255 #define WRITE_DOUBLE_FIELD(p, offset, value) \ 1256 write_double_field(p, offset, value) 1257#endif // V8_TARGET_ARCH_MIPS 1258 1259 1260#define READ_INT_FIELD(p, offset) \ 1261 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset))) 1262 1263#define WRITE_INT_FIELD(p, offset, value) \ 1264 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value) 1265 1266#define READ_INTPTR_FIELD(p, offset) \ 1267 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset))) 1268 1269#define WRITE_INTPTR_FIELD(p, offset, value) \ 1270 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value) 1271 1272#define READ_UINT32_FIELD(p, offset) \ 1273 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset))) 1274 1275#define WRITE_UINT32_FIELD(p, offset, value) \ 1276 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value) 1277 1278#define READ_INT32_FIELD(p, offset) \ 1279 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset))) 1280 1281#define WRITE_INT32_FIELD(p, offset, value) \ 1282 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value) 1283 1284#define READ_INT64_FIELD(p, offset) \ 1285 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset))) 1286 1287#define WRITE_INT64_FIELD(p, offset, value) \ 1288 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value) 1289 1290#define READ_SHORT_FIELD(p, offset) \ 1291 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset))) 1292 1293#define WRITE_SHORT_FIELD(p, offset, value) \ 1294 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value) 1295 1296#define READ_BYTE_FIELD(p, offset) \ 1297 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset))) 1298 1299#define NOBARRIER_READ_BYTE_FIELD(p, offset) \ 1300 static_cast<byte>(base::NoBarrier_Load( \ 1301 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)))) 1302 1303#define WRITE_BYTE_FIELD(p, offset, value) \ 1304 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value) 1305 1306#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \ 1307 base::NoBarrier_Store( \ 1308 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \ 1309 static_cast<base::Atomic8>(value)); 1310 1311Object** HeapObject::RawField(HeapObject* obj, int byte_offset) { 1312 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset)); 1313} 1314 1315 1316int Smi::value() const { 1317 return Internals::SmiValue(this); 1318} 1319 1320 1321Smi* Smi::FromInt(int value) { 1322 DCHECK(Smi::IsValid(value)); 1323 return reinterpret_cast<Smi*>(Internals::IntToSmi(value)); 1324} 1325 1326 1327Smi* Smi::FromIntptr(intptr_t value) { 1328 DCHECK(Smi::IsValid(value)); 1329 int smi_shift_bits = kSmiTagSize + kSmiShiftSize; 1330 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag); 1331} 1332 1333 1334bool Smi::IsValid(intptr_t value) { 1335 bool result = Internals::IsValidSmi(value); 1336 DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue); 1337 return result; 1338} 1339 1340 1341MapWord MapWord::FromMap(const Map* map) { 1342 return MapWord(reinterpret_cast<uintptr_t>(map)); 1343} 1344 1345 1346Map* MapWord::ToMap() { 1347 return reinterpret_cast<Map*>(value_); 1348} 1349 1350 1351bool MapWord::IsForwardingAddress() { 1352 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_)); 1353} 1354 1355 1356MapWord MapWord::FromForwardingAddress(HeapObject* object) { 1357 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag; 1358 return MapWord(reinterpret_cast<uintptr_t>(raw)); 1359} 1360 1361 1362HeapObject* MapWord::ToForwardingAddress() { 1363 DCHECK(IsForwardingAddress()); 1364 return HeapObject::FromAddress(reinterpret_cast<Address>(value_)); 1365} 1366 1367 1368#ifdef VERIFY_HEAP 1369void HeapObject::VerifyObjectField(int offset) { 1370 VerifyPointer(READ_FIELD(this, offset)); 1371} 1372 1373void HeapObject::VerifySmiField(int offset) { 1374 CHECK(READ_FIELD(this, offset)->IsSmi()); 1375} 1376#endif 1377 1378 1379Heap* HeapObject::GetHeap() const { 1380 Heap* heap = 1381 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap(); 1382 SLOW_DCHECK(heap != NULL); 1383 return heap; 1384} 1385 1386 1387Isolate* HeapObject::GetIsolate() const { 1388 return GetHeap()->isolate(); 1389} 1390 1391 1392Map* HeapObject::map() const { 1393#ifdef DEBUG 1394 // Clear mark potentially added by PathTracer. 1395 uintptr_t raw_value = 1396 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag); 1397 return MapWord::FromRawValue(raw_value).ToMap(); 1398#else 1399 return map_word().ToMap(); 1400#endif 1401} 1402 1403 1404void HeapObject::set_map(Map* value) { 1405 set_map_word(MapWord::FromMap(value)); 1406 if (value != NULL) { 1407 // TODO(1600) We are passing NULL as a slot because maps can never be on 1408 // evacuation candidate. 1409 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value); 1410 } 1411} 1412 1413 1414Map* HeapObject::synchronized_map() { 1415 return synchronized_map_word().ToMap(); 1416} 1417 1418 1419void HeapObject::synchronized_set_map(Map* value) { 1420 synchronized_set_map_word(MapWord::FromMap(value)); 1421 if (value != NULL) { 1422 // TODO(1600) We are passing NULL as a slot because maps can never be on 1423 // evacuation candidate. 1424 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value); 1425 } 1426} 1427 1428 1429void HeapObject::synchronized_set_map_no_write_barrier(Map* value) { 1430 synchronized_set_map_word(MapWord::FromMap(value)); 1431} 1432 1433 1434// Unsafe accessor omitting write barrier. 1435void HeapObject::set_map_no_write_barrier(Map* value) { 1436 set_map_word(MapWord::FromMap(value)); 1437} 1438 1439 1440MapWord HeapObject::map_word() const { 1441 return MapWord( 1442 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset))); 1443} 1444 1445 1446void HeapObject::set_map_word(MapWord map_word) { 1447 NOBARRIER_WRITE_FIELD( 1448 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); 1449} 1450 1451 1452MapWord HeapObject::synchronized_map_word() const { 1453 return MapWord( 1454 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset))); 1455} 1456 1457 1458void HeapObject::synchronized_set_map_word(MapWord map_word) { 1459 RELEASE_WRITE_FIELD( 1460 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); 1461} 1462 1463 1464HeapObject* HeapObject::FromAddress(Address address) { 1465 DCHECK_TAG_ALIGNED(address); 1466 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag); 1467} 1468 1469 1470Address HeapObject::address() { 1471 return reinterpret_cast<Address>(this) - kHeapObjectTag; 1472} 1473 1474 1475int HeapObject::Size() { 1476 return SizeFromMap(map()); 1477} 1478 1479 1480bool HeapObject::MayContainRawValues() { 1481 InstanceType type = map()->instance_type(); 1482 if (type <= LAST_NAME_TYPE) { 1483 if (type == SYMBOL_TYPE) { 1484 return false; 1485 } 1486 DCHECK(type < FIRST_NONSTRING_TYPE); 1487 // There are four string representations: sequential strings, external 1488 // strings, cons strings, and sliced strings. 1489 // Only the former two contain raw values and no heap pointers (besides the 1490 // map-word). 1491 return ((type & kIsIndirectStringMask) != kIsIndirectStringTag); 1492 } 1493 // The ConstantPoolArray contains heap pointers, but also raw values. 1494 if (type == CONSTANT_POOL_ARRAY_TYPE) return true; 1495 return (type <= LAST_DATA_TYPE); 1496} 1497 1498 1499void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) { 1500 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)), 1501 reinterpret_cast<Object**>(FIELD_ADDR(this, end))); 1502} 1503 1504 1505void HeapObject::IteratePointer(ObjectVisitor* v, int offset) { 1506 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); 1507} 1508 1509 1510void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) { 1511 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); 1512} 1513 1514 1515double HeapNumber::value() const { 1516 return READ_DOUBLE_FIELD(this, kValueOffset); 1517} 1518 1519 1520void HeapNumber::set_value(double value) { 1521 WRITE_DOUBLE_FIELD(this, kValueOffset, value); 1522} 1523 1524 1525int HeapNumber::get_exponent() { 1526 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >> 1527 kExponentShift) - kExponentBias; 1528} 1529 1530 1531int HeapNumber::get_sign() { 1532 return READ_INT_FIELD(this, kExponentOffset) & kSignMask; 1533} 1534 1535 1536ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset) 1537 1538 1539Object** FixedArray::GetFirstElementAddress() { 1540 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0))); 1541} 1542 1543 1544bool FixedArray::ContainsOnlySmisOrHoles() { 1545 Object* the_hole = GetHeap()->the_hole_value(); 1546 Object** current = GetFirstElementAddress(); 1547 for (int i = 0; i < length(); ++i) { 1548 Object* candidate = *current++; 1549 if (!candidate->IsSmi() && candidate != the_hole) return false; 1550 } 1551 return true; 1552} 1553 1554 1555FixedArrayBase* JSObject::elements() const { 1556 Object* array = READ_FIELD(this, kElementsOffset); 1557 return static_cast<FixedArrayBase*>(array); 1558} 1559 1560 1561void JSObject::ValidateElements(Handle<JSObject> object) { 1562#ifdef ENABLE_SLOW_DCHECKS 1563 if (FLAG_enable_slow_asserts) { 1564 ElementsAccessor* accessor = object->GetElementsAccessor(); 1565 accessor->Validate(object); 1566 } 1567#endif 1568} 1569 1570 1571void AllocationSite::Initialize() { 1572 set_transition_info(Smi::FromInt(0)); 1573 SetElementsKind(GetInitialFastElementsKind()); 1574 set_nested_site(Smi::FromInt(0)); 1575 set_pretenure_data(Smi::FromInt(0)); 1576 set_pretenure_create_count(Smi::FromInt(0)); 1577 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()), 1578 SKIP_WRITE_BARRIER); 1579} 1580 1581 1582void AllocationSite::MarkZombie() { 1583 DCHECK(!IsZombie()); 1584 Initialize(); 1585 set_pretenure_decision(kZombie); 1586} 1587 1588 1589// Heuristic: We only need to create allocation site info if the boilerplate 1590// elements kind is the initial elements kind. 1591AllocationSiteMode AllocationSite::GetMode( 1592 ElementsKind boilerplate_elements_kind) { 1593 if (FLAG_pretenuring_call_new || 1594 IsFastSmiElementsKind(boilerplate_elements_kind)) { 1595 return TRACK_ALLOCATION_SITE; 1596 } 1597 1598 return DONT_TRACK_ALLOCATION_SITE; 1599} 1600 1601 1602AllocationSiteMode AllocationSite::GetMode(ElementsKind from, 1603 ElementsKind to) { 1604 if (FLAG_pretenuring_call_new || 1605 (IsFastSmiElementsKind(from) && 1606 IsMoreGeneralElementsKindTransition(from, to))) { 1607 return TRACK_ALLOCATION_SITE; 1608 } 1609 1610 return DONT_TRACK_ALLOCATION_SITE; 1611} 1612 1613 1614inline bool AllocationSite::CanTrack(InstanceType type) { 1615 if (FLAG_allocation_site_pretenuring) { 1616 return type == JS_ARRAY_TYPE || 1617 type == JS_OBJECT_TYPE || 1618 type < FIRST_NONSTRING_TYPE; 1619 } 1620 return type == JS_ARRAY_TYPE; 1621} 1622 1623 1624inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup( 1625 Reason reason) { 1626 switch (reason) { 1627 case TENURING: 1628 return DependentCode::kAllocationSiteTenuringChangedGroup; 1629 break; 1630 case TRANSITIONS: 1631 return DependentCode::kAllocationSiteTransitionChangedGroup; 1632 break; 1633 } 1634 UNREACHABLE(); 1635 return DependentCode::kAllocationSiteTransitionChangedGroup; 1636} 1637 1638 1639inline void AllocationSite::set_memento_found_count(int count) { 1640 int value = pretenure_data()->value(); 1641 // Verify that we can count more mementos than we can possibly find in one 1642 // new space collection. 1643 DCHECK((GetHeap()->MaxSemiSpaceSize() / 1644 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize + 1645 AllocationMemento::kSize)) < MementoFoundCountBits::kMax); 1646 DCHECK(count < MementoFoundCountBits::kMax); 1647 set_pretenure_data( 1648 Smi::FromInt(MementoFoundCountBits::update(value, count)), 1649 SKIP_WRITE_BARRIER); 1650} 1651 1652inline bool AllocationSite::IncrementMementoFoundCount() { 1653 if (IsZombie()) return false; 1654 1655 int value = memento_found_count(); 1656 set_memento_found_count(value + 1); 1657 return memento_found_count() == kPretenureMinimumCreated; 1658} 1659 1660 1661inline void AllocationSite::IncrementMementoCreateCount() { 1662 DCHECK(FLAG_allocation_site_pretenuring); 1663 int value = memento_create_count(); 1664 set_memento_create_count(value + 1); 1665} 1666 1667 1668inline bool AllocationSite::MakePretenureDecision( 1669 PretenureDecision current_decision, 1670 double ratio, 1671 bool maximum_size_scavenge) { 1672 // Here we just allow state transitions from undecided or maybe tenure 1673 // to don't tenure, maybe tenure, or tenure. 1674 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) { 1675 if (ratio >= kPretenureRatio) { 1676 // We just transition into tenure state when the semi-space was at 1677 // maximum capacity. 1678 if (maximum_size_scavenge) { 1679 set_deopt_dependent_code(true); 1680 set_pretenure_decision(kTenure); 1681 // Currently we just need to deopt when we make a state transition to 1682 // tenure. 1683 return true; 1684 } 1685 set_pretenure_decision(kMaybeTenure); 1686 } else { 1687 set_pretenure_decision(kDontTenure); 1688 } 1689 } 1690 return false; 1691} 1692 1693 1694inline bool AllocationSite::DigestPretenuringFeedback( 1695 bool maximum_size_scavenge) { 1696 bool deopt = false; 1697 int create_count = memento_create_count(); 1698 int found_count = memento_found_count(); 1699 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated; 1700 double ratio = 1701 minimum_mementos_created || FLAG_trace_pretenuring_statistics ? 1702 static_cast<double>(found_count) / create_count : 0.0; 1703 PretenureDecision current_decision = pretenure_decision(); 1704 1705 if (minimum_mementos_created) { 1706 deopt = MakePretenureDecision( 1707 current_decision, ratio, maximum_size_scavenge); 1708 } 1709 1710 if (FLAG_trace_pretenuring_statistics) { 1711 PrintF( 1712 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n", 1713 static_cast<void*>(this), create_count, found_count, ratio, 1714 PretenureDecisionName(current_decision), 1715 PretenureDecisionName(pretenure_decision())); 1716 } 1717 1718 // Clear feedback calculation fields until the next gc. 1719 set_memento_found_count(0); 1720 set_memento_create_count(0); 1721 return deopt; 1722} 1723 1724 1725void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) { 1726 JSObject::ValidateElements(object); 1727 ElementsKind elements_kind = object->map()->elements_kind(); 1728 if (!IsFastObjectElementsKind(elements_kind)) { 1729 if (IsFastHoleyElementsKind(elements_kind)) { 1730 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS); 1731 } else { 1732 TransitionElementsKind(object, FAST_ELEMENTS); 1733 } 1734 } 1735} 1736 1737 1738void JSObject::EnsureCanContainElements(Handle<JSObject> object, 1739 Object** objects, 1740 uint32_t count, 1741 EnsureElementsMode mode) { 1742 ElementsKind current_kind = object->map()->elements_kind(); 1743 ElementsKind target_kind = current_kind; 1744 { 1745 DisallowHeapAllocation no_allocation; 1746 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS); 1747 bool is_holey = IsFastHoleyElementsKind(current_kind); 1748 if (current_kind == FAST_HOLEY_ELEMENTS) return; 1749 Heap* heap = object->GetHeap(); 1750 Object* the_hole = heap->the_hole_value(); 1751 for (uint32_t i = 0; i < count; ++i) { 1752 Object* current = *objects++; 1753 if (current == the_hole) { 1754 is_holey = true; 1755 target_kind = GetHoleyElementsKind(target_kind); 1756 } else if (!current->IsSmi()) { 1757 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) { 1758 if (IsFastSmiElementsKind(target_kind)) { 1759 if (is_holey) { 1760 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS; 1761 } else { 1762 target_kind = FAST_DOUBLE_ELEMENTS; 1763 } 1764 } 1765 } else if (is_holey) { 1766 target_kind = FAST_HOLEY_ELEMENTS; 1767 break; 1768 } else { 1769 target_kind = FAST_ELEMENTS; 1770 } 1771 } 1772 } 1773 } 1774 if (target_kind != current_kind) { 1775 TransitionElementsKind(object, target_kind); 1776 } 1777} 1778 1779 1780void JSObject::EnsureCanContainElements(Handle<JSObject> object, 1781 Handle<FixedArrayBase> elements, 1782 uint32_t length, 1783 EnsureElementsMode mode) { 1784 Heap* heap = object->GetHeap(); 1785 if (elements->map() != heap->fixed_double_array_map()) { 1786 DCHECK(elements->map() == heap->fixed_array_map() || 1787 elements->map() == heap->fixed_cow_array_map()); 1788 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) { 1789 mode = DONT_ALLOW_DOUBLE_ELEMENTS; 1790 } 1791 Object** objects = 1792 Handle<FixedArray>::cast(elements)->GetFirstElementAddress(); 1793 EnsureCanContainElements(object, objects, length, mode); 1794 return; 1795 } 1796 1797 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS); 1798 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) { 1799 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS); 1800 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) { 1801 Handle<FixedDoubleArray> double_array = 1802 Handle<FixedDoubleArray>::cast(elements); 1803 for (uint32_t i = 0; i < length; ++i) { 1804 if (double_array->is_the_hole(i)) { 1805 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS); 1806 return; 1807 } 1808 } 1809 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS); 1810 } 1811} 1812 1813 1814void JSObject::SetMapAndElements(Handle<JSObject> object, 1815 Handle<Map> new_map, 1816 Handle<FixedArrayBase> value) { 1817 JSObject::MigrateToMap(object, new_map); 1818 DCHECK((object->map()->has_fast_smi_or_object_elements() || 1819 (*value == object->GetHeap()->empty_fixed_array())) == 1820 (value->map() == object->GetHeap()->fixed_array_map() || 1821 value->map() == object->GetHeap()->fixed_cow_array_map())); 1822 DCHECK((*value == object->GetHeap()->empty_fixed_array()) || 1823 (object->map()->has_fast_double_elements() == 1824 value->IsFixedDoubleArray())); 1825 object->set_elements(*value); 1826} 1827 1828 1829void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) { 1830 WRITE_FIELD(this, kElementsOffset, value); 1831 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode); 1832} 1833 1834 1835void JSObject::initialize_properties() { 1836 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); 1837 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array()); 1838} 1839 1840 1841void JSObject::initialize_elements() { 1842 FixedArrayBase* elements = map()->GetInitialElements(); 1843 WRITE_FIELD(this, kElementsOffset, elements); 1844} 1845 1846 1847Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) { 1848 DisallowHeapAllocation no_gc; 1849 if (!map->HasTransitionArray()) return Handle<String>::null(); 1850 TransitionArray* transitions = map->transitions(); 1851 if (!transitions->IsSimpleTransition()) return Handle<String>::null(); 1852 int transition = TransitionArray::kSimpleTransitionIndex; 1853 PropertyDetails details = transitions->GetTargetDetails(transition); 1854 Name* name = transitions->GetKey(transition); 1855 if (details.type() != FIELD) return Handle<String>::null(); 1856 if (details.attributes() != NONE) return Handle<String>::null(); 1857 if (!name->IsString()) return Handle<String>::null(); 1858 return Handle<String>(String::cast(name)); 1859} 1860 1861 1862Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) { 1863 DCHECK(!ExpectedTransitionKey(map).is_null()); 1864 return Handle<Map>(map->transitions()->GetTarget( 1865 TransitionArray::kSimpleTransitionIndex)); 1866} 1867 1868 1869Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) { 1870 DisallowHeapAllocation no_allocation; 1871 if (!map->HasTransitionArray()) return Handle<Map>::null(); 1872 TransitionArray* transitions = map->transitions(); 1873 int transition = transitions->Search(*key); 1874 if (transition == TransitionArray::kNotFound) return Handle<Map>::null(); 1875 PropertyDetails target_details = transitions->GetTargetDetails(transition); 1876 if (target_details.type() != FIELD) return Handle<Map>::null(); 1877 if (target_details.attributes() != NONE) return Handle<Map>::null(); 1878 return Handle<Map>(transitions->GetTarget(transition)); 1879} 1880 1881 1882ACCESSORS(Oddball, to_string, String, kToStringOffset) 1883ACCESSORS(Oddball, to_number, Object, kToNumberOffset) 1884 1885 1886byte Oddball::kind() const { 1887 return Smi::cast(READ_FIELD(this, kKindOffset))->value(); 1888} 1889 1890 1891void Oddball::set_kind(byte value) { 1892 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value)); 1893} 1894 1895 1896Object* Cell::value() const { 1897 return READ_FIELD(this, kValueOffset); 1898} 1899 1900 1901void Cell::set_value(Object* val, WriteBarrierMode ignored) { 1902 // The write barrier is not used for global property cells. 1903 DCHECK(!val->IsPropertyCell() && !val->IsCell()); 1904 WRITE_FIELD(this, kValueOffset, val); 1905} 1906 1907ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset) 1908 1909Object* PropertyCell::type_raw() const { 1910 return READ_FIELD(this, kTypeOffset); 1911} 1912 1913 1914void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) { 1915 WRITE_FIELD(this, kTypeOffset, val); 1916} 1917 1918 1919int JSObject::GetHeaderSize() { 1920 InstanceType type = map()->instance_type(); 1921 // Check for the most common kind of JavaScript object before 1922 // falling into the generic switch. This speeds up the internal 1923 // field operations considerably on average. 1924 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize; 1925 switch (type) { 1926 case JS_GENERATOR_OBJECT_TYPE: 1927 return JSGeneratorObject::kSize; 1928 case JS_MODULE_TYPE: 1929 return JSModule::kSize; 1930 case JS_GLOBAL_PROXY_TYPE: 1931 return JSGlobalProxy::kSize; 1932 case JS_GLOBAL_OBJECT_TYPE: 1933 return JSGlobalObject::kSize; 1934 case JS_BUILTINS_OBJECT_TYPE: 1935 return JSBuiltinsObject::kSize; 1936 case JS_FUNCTION_TYPE: 1937 return JSFunction::kSize; 1938 case JS_VALUE_TYPE: 1939 return JSValue::kSize; 1940 case JS_DATE_TYPE: 1941 return JSDate::kSize; 1942 case JS_ARRAY_TYPE: 1943 return JSArray::kSize; 1944 case JS_ARRAY_BUFFER_TYPE: 1945 return JSArrayBuffer::kSize; 1946 case JS_TYPED_ARRAY_TYPE: 1947 return JSTypedArray::kSize; 1948 case JS_DATA_VIEW_TYPE: 1949 return JSDataView::kSize; 1950 case JS_SET_TYPE: 1951 return JSSet::kSize; 1952 case JS_MAP_TYPE: 1953 return JSMap::kSize; 1954 case JS_SET_ITERATOR_TYPE: 1955 return JSSetIterator::kSize; 1956 case JS_MAP_ITERATOR_TYPE: 1957 return JSMapIterator::kSize; 1958 case JS_WEAK_MAP_TYPE: 1959 return JSWeakMap::kSize; 1960 case JS_WEAK_SET_TYPE: 1961 return JSWeakSet::kSize; 1962 case JS_REGEXP_TYPE: 1963 return JSRegExp::kSize; 1964 case JS_CONTEXT_EXTENSION_OBJECT_TYPE: 1965 return JSObject::kHeaderSize; 1966 case JS_MESSAGE_OBJECT_TYPE: 1967 return JSMessageObject::kSize; 1968 default: 1969 // TODO(jkummerow): Re-enable this. Blink currently hits this 1970 // from its CustomElementConstructorBuilder. 1971 // UNREACHABLE(); 1972 return 0; 1973 } 1974} 1975 1976 1977int JSObject::GetInternalFieldCount() { 1978 DCHECK(1 << kPointerSizeLog2 == kPointerSize); 1979 // Make sure to adjust for the number of in-object properties. These 1980 // properties do contribute to the size, but are not internal fields. 1981 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) - 1982 map()->inobject_properties(); 1983} 1984 1985 1986int JSObject::GetInternalFieldOffset(int index) { 1987 DCHECK(index < GetInternalFieldCount() && index >= 0); 1988 return GetHeaderSize() + (kPointerSize * index); 1989} 1990 1991 1992Object* JSObject::GetInternalField(int index) { 1993 DCHECK(index < GetInternalFieldCount() && index >= 0); 1994 // Internal objects do follow immediately after the header, whereas in-object 1995 // properties are at the end of the object. Therefore there is no need 1996 // to adjust the index here. 1997 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index)); 1998} 1999 2000 2001void JSObject::SetInternalField(int index, Object* value) { 2002 DCHECK(index < GetInternalFieldCount() && index >= 0); 2003 // Internal objects do follow immediately after the header, whereas in-object 2004 // properties are at the end of the object. Therefore there is no need 2005 // to adjust the index here. 2006 int offset = GetHeaderSize() + (kPointerSize * index); 2007 WRITE_FIELD(this, offset, value); 2008 WRITE_BARRIER(GetHeap(), this, offset, value); 2009} 2010 2011 2012void JSObject::SetInternalField(int index, Smi* value) { 2013 DCHECK(index < GetInternalFieldCount() && index >= 0); 2014 // Internal objects do follow immediately after the header, whereas in-object 2015 // properties are at the end of the object. Therefore there is no need 2016 // to adjust the index here. 2017 int offset = GetHeaderSize() + (kPointerSize * index); 2018 WRITE_FIELD(this, offset, value); 2019} 2020 2021 2022// Access fast-case object properties at index. The use of these routines 2023// is needed to correctly distinguish between properties stored in-object and 2024// properties stored in the properties array. 2025Object* JSObject::RawFastPropertyAt(FieldIndex index) { 2026 if (index.is_inobject()) { 2027 return READ_FIELD(this, index.offset()); 2028 } else { 2029 return properties()->get(index.outobject_array_index()); 2030 } 2031} 2032 2033 2034void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { 2035 if (index.is_inobject()) { 2036 int offset = index.offset(); 2037 WRITE_FIELD(this, offset, value); 2038 WRITE_BARRIER(GetHeap(), this, offset, value); 2039 } else { 2040 properties()->set(index.outobject_array_index(), value); 2041 } 2042} 2043 2044 2045int JSObject::GetInObjectPropertyOffset(int index) { 2046 return map()->GetInObjectPropertyOffset(index); 2047} 2048 2049 2050Object* JSObject::InObjectPropertyAt(int index) { 2051 int offset = GetInObjectPropertyOffset(index); 2052 return READ_FIELD(this, offset); 2053} 2054 2055 2056Object* JSObject::InObjectPropertyAtPut(int index, 2057 Object* value, 2058 WriteBarrierMode mode) { 2059 // Adjust for the number of properties stored in the object. 2060 int offset = GetInObjectPropertyOffset(index); 2061 WRITE_FIELD(this, offset, value); 2062 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); 2063 return value; 2064} 2065 2066 2067 2068void JSObject::InitializeBody(Map* map, 2069 Object* pre_allocated_value, 2070 Object* filler_value) { 2071 DCHECK(!filler_value->IsHeapObject() || 2072 !GetHeap()->InNewSpace(filler_value)); 2073 DCHECK(!pre_allocated_value->IsHeapObject() || 2074 !GetHeap()->InNewSpace(pre_allocated_value)); 2075 int size = map->instance_size(); 2076 int offset = kHeaderSize; 2077 if (filler_value != pre_allocated_value) { 2078 int pre_allocated = map->pre_allocated_property_fields(); 2079 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size); 2080 for (int i = 0; i < pre_allocated; i++) { 2081 WRITE_FIELD(this, offset, pre_allocated_value); 2082 offset += kPointerSize; 2083 } 2084 } 2085 while (offset < size) { 2086 WRITE_FIELD(this, offset, filler_value); 2087 offset += kPointerSize; 2088 } 2089} 2090 2091 2092bool JSObject::HasFastProperties() { 2093 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map()); 2094 return !properties()->IsDictionary(); 2095} 2096 2097 2098bool Map::TooManyFastProperties(StoreFromKeyed store_mode) { 2099 if (unused_property_fields() != 0) return false; 2100 if (is_prototype_map()) return false; 2101 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12; 2102 int limit = Max(minimum, inobject_properties()); 2103 int external = NumberOfFields() - inobject_properties(); 2104 return external > limit; 2105} 2106 2107 2108void Struct::InitializeBody(int object_size) { 2109 Object* value = GetHeap()->undefined_value(); 2110 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 2111 WRITE_FIELD(this, offset, value); 2112 } 2113} 2114 2115 2116bool Object::ToArrayIndex(uint32_t* index) { 2117 if (IsSmi()) { 2118 int value = Smi::cast(this)->value(); 2119 if (value < 0) return false; 2120 *index = value; 2121 return true; 2122 } 2123 if (IsHeapNumber()) { 2124 double value = HeapNumber::cast(this)->value(); 2125 uint32_t uint_value = static_cast<uint32_t>(value); 2126 if (value == static_cast<double>(uint_value)) { 2127 *index = uint_value; 2128 return true; 2129 } 2130 } 2131 return false; 2132} 2133 2134 2135bool Object::IsStringObjectWithCharacterAt(uint32_t index) { 2136 if (!this->IsJSValue()) return false; 2137 2138 JSValue* js_value = JSValue::cast(this); 2139 if (!js_value->value()->IsString()) return false; 2140 2141 String* str = String::cast(js_value->value()); 2142 if (index >= static_cast<uint32_t>(str->length())) return false; 2143 2144 return true; 2145} 2146 2147 2148void Object::VerifyApiCallResultType() { 2149#if ENABLE_EXTRA_CHECKS 2150 if (!(IsSmi() || 2151 IsString() || 2152 IsSymbol() || 2153 IsSpecObject() || 2154 IsHeapNumber() || 2155 IsUndefined() || 2156 IsTrue() || 2157 IsFalse() || 2158 IsNull())) { 2159 FATAL("API call returned invalid object"); 2160 } 2161#endif // ENABLE_EXTRA_CHECKS 2162} 2163 2164 2165Object* FixedArray::get(int index) { 2166 SLOW_DCHECK(index >= 0 && index < this->length()); 2167 return READ_FIELD(this, kHeaderSize + index * kPointerSize); 2168} 2169 2170 2171Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) { 2172 return handle(array->get(index), array->GetIsolate()); 2173} 2174 2175 2176bool FixedArray::is_the_hole(int index) { 2177 return get(index) == GetHeap()->the_hole_value(); 2178} 2179 2180 2181void FixedArray::set(int index, Smi* value) { 2182 DCHECK(map() != GetHeap()->fixed_cow_array_map()); 2183 DCHECK(index >= 0 && index < this->length()); 2184 DCHECK(reinterpret_cast<Object*>(value)->IsSmi()); 2185 int offset = kHeaderSize + index * kPointerSize; 2186 WRITE_FIELD(this, offset, value); 2187} 2188 2189 2190void FixedArray::set(int index, Object* value) { 2191 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map()); 2192 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type()); 2193 DCHECK(index >= 0 && index < this->length()); 2194 int offset = kHeaderSize + index * kPointerSize; 2195 WRITE_FIELD(this, offset, value); 2196 WRITE_BARRIER(GetHeap(), this, offset, value); 2197} 2198 2199 2200inline bool FixedDoubleArray::is_the_hole_nan(double value) { 2201 return bit_cast<uint64_t, double>(value) == kHoleNanInt64; 2202} 2203 2204 2205inline double FixedDoubleArray::hole_nan_as_double() { 2206 return bit_cast<double, uint64_t>(kHoleNanInt64); 2207} 2208 2209 2210inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() { 2211 DCHECK(bit_cast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64); 2212 DCHECK((bit_cast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32); 2213 return base::OS::nan_value(); 2214} 2215 2216 2217double FixedDoubleArray::get_scalar(int index) { 2218 DCHECK(map() != GetHeap()->fixed_cow_array_map() && 2219 map() != GetHeap()->fixed_array_map()); 2220 DCHECK(index >= 0 && index < this->length()); 2221 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize); 2222 DCHECK(!is_the_hole_nan(result)); 2223 return result; 2224} 2225 2226int64_t FixedDoubleArray::get_representation(int index) { 2227 DCHECK(map() != GetHeap()->fixed_cow_array_map() && 2228 map() != GetHeap()->fixed_array_map()); 2229 DCHECK(index >= 0 && index < this->length()); 2230 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize); 2231} 2232 2233 2234Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array, 2235 int index) { 2236 if (array->is_the_hole(index)) { 2237 return array->GetIsolate()->factory()->the_hole_value(); 2238 } else { 2239 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index)); 2240 } 2241} 2242 2243 2244void FixedDoubleArray::set(int index, double value) { 2245 DCHECK(map() != GetHeap()->fixed_cow_array_map() && 2246 map() != GetHeap()->fixed_array_map()); 2247 int offset = kHeaderSize + index * kDoubleSize; 2248 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double(); 2249 WRITE_DOUBLE_FIELD(this, offset, value); 2250} 2251 2252 2253void FixedDoubleArray::set_the_hole(int index) { 2254 DCHECK(map() != GetHeap()->fixed_cow_array_map() && 2255 map() != GetHeap()->fixed_array_map()); 2256 int offset = kHeaderSize + index * kDoubleSize; 2257 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double()); 2258} 2259 2260 2261bool FixedDoubleArray::is_the_hole(int index) { 2262 int offset = kHeaderSize + index * kDoubleSize; 2263 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset)); 2264} 2265 2266 2267double* FixedDoubleArray::data_start() { 2268 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize)); 2269} 2270 2271 2272void FixedDoubleArray::FillWithHoles(int from, int to) { 2273 for (int i = from; i < to; i++) { 2274 set_the_hole(i); 2275 } 2276} 2277 2278 2279void ConstantPoolArray::NumberOfEntries::increment(Type type) { 2280 DCHECK(type < NUMBER_OF_TYPES); 2281 element_counts_[type]++; 2282} 2283 2284 2285int ConstantPoolArray::NumberOfEntries::equals( 2286 const ConstantPoolArray::NumberOfEntries& other) const { 2287 for (int i = 0; i < NUMBER_OF_TYPES; i++) { 2288 if (element_counts_[i] != other.element_counts_[i]) return false; 2289 } 2290 return true; 2291} 2292 2293 2294bool ConstantPoolArray::NumberOfEntries::is_empty() const { 2295 return total_count() == 0; 2296} 2297 2298 2299int ConstantPoolArray::NumberOfEntries::count_of(Type type) const { 2300 DCHECK(type < NUMBER_OF_TYPES); 2301 return element_counts_[type]; 2302} 2303 2304 2305int ConstantPoolArray::NumberOfEntries::base_of(Type type) const { 2306 int base = 0; 2307 DCHECK(type < NUMBER_OF_TYPES); 2308 for (int i = 0; i < type; i++) { 2309 base += element_counts_[i]; 2310 } 2311 return base; 2312} 2313 2314 2315int ConstantPoolArray::NumberOfEntries::total_count() const { 2316 int count = 0; 2317 for (int i = 0; i < NUMBER_OF_TYPES; i++) { 2318 count += element_counts_[i]; 2319 } 2320 return count; 2321} 2322 2323 2324int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const { 2325 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) { 2326 if (element_counts_[i] < min || element_counts_[i] > max) { 2327 return false; 2328 } 2329 } 2330 return true; 2331} 2332 2333 2334int ConstantPoolArray::Iterator::next_index() { 2335 DCHECK(!is_finished()); 2336 int ret = next_index_++; 2337 update_section(); 2338 return ret; 2339} 2340 2341 2342bool ConstantPoolArray::Iterator::is_finished() { 2343 return next_index_ > array_->last_index(type_, final_section_); 2344} 2345 2346 2347void ConstantPoolArray::Iterator::update_section() { 2348 if (next_index_ > array_->last_index(type_, current_section_) && 2349 current_section_ != final_section_) { 2350 DCHECK(final_section_ == EXTENDED_SECTION); 2351 current_section_ = EXTENDED_SECTION; 2352 next_index_ = array_->first_index(type_, EXTENDED_SECTION); 2353 } 2354} 2355 2356 2357bool ConstantPoolArray::is_extended_layout() { 2358 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset); 2359 return IsExtendedField::decode(small_layout_1); 2360} 2361 2362 2363ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() { 2364 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION; 2365} 2366 2367 2368int ConstantPoolArray::first_extended_section_index() { 2369 DCHECK(is_extended_layout()); 2370 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); 2371 return TotalCountField::decode(small_layout_2); 2372} 2373 2374 2375int ConstantPoolArray::get_extended_section_header_offset() { 2376 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size); 2377} 2378 2379 2380ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() { 2381 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); 2382 return WeakObjectStateField::decode(small_layout_2); 2383} 2384 2385 2386void ConstantPoolArray::set_weak_object_state( 2387 ConstantPoolArray::WeakObjectState state) { 2388 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); 2389 small_layout_2 = WeakObjectStateField::update(small_layout_2, state); 2390 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2); 2391} 2392 2393 2394int ConstantPoolArray::first_index(Type type, LayoutSection section) { 2395 int index = 0; 2396 if (section == EXTENDED_SECTION) { 2397 DCHECK(is_extended_layout()); 2398 index += first_extended_section_index(); 2399 } 2400 2401 for (Type type_iter = FIRST_TYPE; type_iter < type; 2402 type_iter = next_type(type_iter)) { 2403 index += number_of_entries(type_iter, section); 2404 } 2405 2406 return index; 2407} 2408 2409 2410int ConstantPoolArray::last_index(Type type, LayoutSection section) { 2411 return first_index(type, section) + number_of_entries(type, section) - 1; 2412} 2413 2414 2415int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) { 2416 if (section == SMALL_SECTION) { 2417 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset); 2418 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); 2419 switch (type) { 2420 case INT64: 2421 return Int64CountField::decode(small_layout_1); 2422 case CODE_PTR: 2423 return CodePtrCountField::decode(small_layout_1); 2424 case HEAP_PTR: 2425 return HeapPtrCountField::decode(small_layout_1); 2426 case INT32: 2427 return Int32CountField::decode(small_layout_2); 2428 default: 2429 UNREACHABLE(); 2430 return 0; 2431 } 2432 } else { 2433 DCHECK(section == EXTENDED_SECTION && is_extended_layout()); 2434 int offset = get_extended_section_header_offset(); 2435 switch (type) { 2436 case INT64: 2437 offset += kExtendedInt64CountOffset; 2438 break; 2439 case CODE_PTR: 2440 offset += kExtendedCodePtrCountOffset; 2441 break; 2442 case HEAP_PTR: 2443 offset += kExtendedHeapPtrCountOffset; 2444 break; 2445 case INT32: 2446 offset += kExtendedInt32CountOffset; 2447 break; 2448 default: 2449 UNREACHABLE(); 2450 } 2451 return READ_INT_FIELD(this, offset); 2452 } 2453} 2454 2455 2456bool ConstantPoolArray::offset_is_type(int offset, Type type) { 2457 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) && 2458 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) || 2459 (is_extended_layout() && 2460 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) && 2461 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION))); 2462} 2463 2464 2465ConstantPoolArray::Type ConstantPoolArray::get_type(int index) { 2466 LayoutSection section; 2467 if (is_extended_layout() && index >= first_extended_section_index()) { 2468 section = EXTENDED_SECTION; 2469 } else { 2470 section = SMALL_SECTION; 2471 } 2472 2473 Type type = FIRST_TYPE; 2474 while (index > last_index(type, section)) { 2475 type = next_type(type); 2476 } 2477 DCHECK(type <= LAST_TYPE); 2478 return type; 2479} 2480 2481 2482int64_t ConstantPoolArray::get_int64_entry(int index) { 2483 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2484 DCHECK(get_type(index) == INT64); 2485 return READ_INT64_FIELD(this, OffsetOfElementAt(index)); 2486} 2487 2488 2489double ConstantPoolArray::get_int64_entry_as_double(int index) { 2490 STATIC_ASSERT(kDoubleSize == kInt64Size); 2491 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2492 DCHECK(get_type(index) == INT64); 2493 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index)); 2494} 2495 2496 2497Address ConstantPoolArray::get_code_ptr_entry(int index) { 2498 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2499 DCHECK(get_type(index) == CODE_PTR); 2500 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index))); 2501} 2502 2503 2504Object* ConstantPoolArray::get_heap_ptr_entry(int index) { 2505 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2506 DCHECK(get_type(index) == HEAP_PTR); 2507 return READ_FIELD(this, OffsetOfElementAt(index)); 2508} 2509 2510 2511int32_t ConstantPoolArray::get_int32_entry(int index) { 2512 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2513 DCHECK(get_type(index) == INT32); 2514 return READ_INT32_FIELD(this, OffsetOfElementAt(index)); 2515} 2516 2517 2518void ConstantPoolArray::set(int index, int64_t value) { 2519 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2520 DCHECK(get_type(index) == INT64); 2521 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value); 2522} 2523 2524 2525void ConstantPoolArray::set(int index, double value) { 2526 STATIC_ASSERT(kDoubleSize == kInt64Size); 2527 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2528 DCHECK(get_type(index) == INT64); 2529 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value); 2530} 2531 2532 2533void ConstantPoolArray::set(int index, Address value) { 2534 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2535 DCHECK(get_type(index) == CODE_PTR); 2536 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value)); 2537} 2538 2539 2540void ConstantPoolArray::set(int index, Object* value) { 2541 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2542 DCHECK(!GetHeap()->InNewSpace(value)); 2543 DCHECK(get_type(index) == HEAP_PTR); 2544 WRITE_FIELD(this, OffsetOfElementAt(index), value); 2545 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value); 2546} 2547 2548 2549void ConstantPoolArray::set(int index, int32_t value) { 2550 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2551 DCHECK(get_type(index) == INT32); 2552 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value); 2553} 2554 2555 2556void ConstantPoolArray::set_at_offset(int offset, int32_t value) { 2557 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2558 DCHECK(offset_is_type(offset, INT32)); 2559 WRITE_INT32_FIELD(this, offset, value); 2560} 2561 2562 2563void ConstantPoolArray::set_at_offset(int offset, int64_t value) { 2564 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2565 DCHECK(offset_is_type(offset, INT64)); 2566 WRITE_INT64_FIELD(this, offset, value); 2567} 2568 2569 2570void ConstantPoolArray::set_at_offset(int offset, double value) { 2571 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2572 DCHECK(offset_is_type(offset, INT64)); 2573 WRITE_DOUBLE_FIELD(this, offset, value); 2574} 2575 2576 2577void ConstantPoolArray::set_at_offset(int offset, Address value) { 2578 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2579 DCHECK(offset_is_type(offset, CODE_PTR)); 2580 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value)); 2581 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value)); 2582} 2583 2584 2585void ConstantPoolArray::set_at_offset(int offset, Object* value) { 2586 DCHECK(map() == GetHeap()->constant_pool_array_map()); 2587 DCHECK(!GetHeap()->InNewSpace(value)); 2588 DCHECK(offset_is_type(offset, HEAP_PTR)); 2589 WRITE_FIELD(this, offset, value); 2590 WRITE_BARRIER(GetHeap(), this, offset, value); 2591} 2592 2593 2594void ConstantPoolArray::Init(const NumberOfEntries& small) { 2595 uint32_t small_layout_1 = 2596 Int64CountField::encode(small.count_of(INT64)) | 2597 CodePtrCountField::encode(small.count_of(CODE_PTR)) | 2598 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) | 2599 IsExtendedField::encode(false); 2600 uint32_t small_layout_2 = 2601 Int32CountField::encode(small.count_of(INT32)) | 2602 TotalCountField::encode(small.total_count()) | 2603 WeakObjectStateField::encode(NO_WEAK_OBJECTS); 2604 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1); 2605 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2); 2606 if (kHeaderSize != kFirstEntryOffset) { 2607 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size); 2608 WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding. 2609 } 2610} 2611 2612 2613void ConstantPoolArray::InitExtended(const NumberOfEntries& small, 2614 const NumberOfEntries& extended) { 2615 // Initialize small layout fields first. 2616 Init(small); 2617 2618 // Set is_extended_layout field. 2619 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset); 2620 small_layout_1 = IsExtendedField::update(small_layout_1, true); 2621 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1); 2622 2623 // Initialize the extended layout fields. 2624 int extended_header_offset = get_extended_section_header_offset(); 2625 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset, 2626 extended.count_of(INT64)); 2627 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset, 2628 extended.count_of(CODE_PTR)); 2629 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset, 2630 extended.count_of(HEAP_PTR)); 2631 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset, 2632 extended.count_of(INT32)); 2633} 2634 2635 2636int ConstantPoolArray::size() { 2637 NumberOfEntries small(this, SMALL_SECTION); 2638 if (!is_extended_layout()) { 2639 return SizeFor(small); 2640 } else { 2641 NumberOfEntries extended(this, EXTENDED_SECTION); 2642 return SizeForExtended(small, extended); 2643 } 2644} 2645 2646 2647int ConstantPoolArray::length() { 2648 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); 2649 int length = TotalCountField::decode(small_layout_2); 2650 if (is_extended_layout()) { 2651 length += number_of_entries(INT64, EXTENDED_SECTION) + 2652 number_of_entries(CODE_PTR, EXTENDED_SECTION) + 2653 number_of_entries(HEAP_PTR, EXTENDED_SECTION) + 2654 number_of_entries(INT32, EXTENDED_SECTION); 2655 } 2656 return length; 2657} 2658 2659 2660WriteBarrierMode HeapObject::GetWriteBarrierMode( 2661 const DisallowHeapAllocation& promise) { 2662 Heap* heap = GetHeap(); 2663 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER; 2664 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER; 2665 return UPDATE_WRITE_BARRIER; 2666} 2667 2668 2669void FixedArray::set(int index, 2670 Object* value, 2671 WriteBarrierMode mode) { 2672 DCHECK(map() != GetHeap()->fixed_cow_array_map()); 2673 DCHECK(index >= 0 && index < this->length()); 2674 int offset = kHeaderSize + index * kPointerSize; 2675 WRITE_FIELD(this, offset, value); 2676 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); 2677} 2678 2679 2680void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array, 2681 int index, 2682 Object* value) { 2683 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map()); 2684 DCHECK(index >= 0 && index < array->length()); 2685 int offset = kHeaderSize + index * kPointerSize; 2686 WRITE_FIELD(array, offset, value); 2687 Heap* heap = array->GetHeap(); 2688 if (heap->InNewSpace(value)) { 2689 heap->RecordWrite(array->address(), offset); 2690 } 2691} 2692 2693 2694void FixedArray::NoWriteBarrierSet(FixedArray* array, 2695 int index, 2696 Object* value) { 2697 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map()); 2698 DCHECK(index >= 0 && index < array->length()); 2699 DCHECK(!array->GetHeap()->InNewSpace(value)); 2700 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); 2701} 2702 2703 2704void FixedArray::set_undefined(int index) { 2705 DCHECK(map() != GetHeap()->fixed_cow_array_map()); 2706 DCHECK(index >= 0 && index < this->length()); 2707 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value())); 2708 WRITE_FIELD(this, 2709 kHeaderSize + index * kPointerSize, 2710 GetHeap()->undefined_value()); 2711} 2712 2713 2714void FixedArray::set_null(int index) { 2715 DCHECK(index >= 0 && index < this->length()); 2716 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value())); 2717 WRITE_FIELD(this, 2718 kHeaderSize + index * kPointerSize, 2719 GetHeap()->null_value()); 2720} 2721 2722 2723void FixedArray::set_the_hole(int index) { 2724 DCHECK(map() != GetHeap()->fixed_cow_array_map()); 2725 DCHECK(index >= 0 && index < this->length()); 2726 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value())); 2727 WRITE_FIELD(this, 2728 kHeaderSize + index * kPointerSize, 2729 GetHeap()->the_hole_value()); 2730} 2731 2732 2733void FixedArray::FillWithHoles(int from, int to) { 2734 for (int i = from; i < to; i++) { 2735 set_the_hole(i); 2736 } 2737} 2738 2739 2740Object** FixedArray::data_start() { 2741 return HeapObject::RawField(this, kHeaderSize); 2742} 2743 2744 2745bool DescriptorArray::IsEmpty() { 2746 DCHECK(length() >= kFirstIndex || 2747 this == GetHeap()->empty_descriptor_array()); 2748 return length() < kFirstIndex; 2749} 2750 2751 2752void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) { 2753 WRITE_FIELD( 2754 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors)); 2755} 2756 2757 2758// Perform a binary search in a fixed array. Low and high are entry indices. If 2759// there are three entries in this array it should be called with low=0 and 2760// high=2. 2761template<SearchMode search_mode, typename T> 2762int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) { 2763 uint32_t hash = name->Hash(); 2764 int limit = high; 2765 2766 DCHECK(low <= high); 2767 2768 while (low != high) { 2769 int mid = (low + high) / 2; 2770 Name* mid_name = array->GetSortedKey(mid); 2771 uint32_t mid_hash = mid_name->Hash(); 2772 2773 if (mid_hash >= hash) { 2774 high = mid; 2775 } else { 2776 low = mid + 1; 2777 } 2778 } 2779 2780 for (; low <= limit; ++low) { 2781 int sort_index = array->GetSortedKeyIndex(low); 2782 Name* entry = array->GetKey(sort_index); 2783 if (entry->Hash() != hash) break; 2784 if (entry->Equals(name)) { 2785 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) { 2786 return sort_index; 2787 } 2788 return T::kNotFound; 2789 } 2790 } 2791 2792 return T::kNotFound; 2793} 2794 2795 2796// Perform a linear search in this fixed array. len is the number of entry 2797// indices that are valid. 2798template<SearchMode search_mode, typename T> 2799int LinearSearch(T* array, Name* name, int len, int valid_entries) { 2800 uint32_t hash = name->Hash(); 2801 if (search_mode == ALL_ENTRIES) { 2802 for (int number = 0; number < len; number++) { 2803 int sorted_index = array->GetSortedKeyIndex(number); 2804 Name* entry = array->GetKey(sorted_index); 2805 uint32_t current_hash = entry->Hash(); 2806 if (current_hash > hash) break; 2807 if (current_hash == hash && entry->Equals(name)) return sorted_index; 2808 } 2809 } else { 2810 DCHECK(len >= valid_entries); 2811 for (int number = 0; number < valid_entries; number++) { 2812 Name* entry = array->GetKey(number); 2813 uint32_t current_hash = entry->Hash(); 2814 if (current_hash == hash && entry->Equals(name)) return number; 2815 } 2816 } 2817 return T::kNotFound; 2818} 2819 2820 2821template<SearchMode search_mode, typename T> 2822int Search(T* array, Name* name, int valid_entries) { 2823 if (search_mode == VALID_ENTRIES) { 2824 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries)); 2825 } else { 2826 SLOW_DCHECK(array->IsSortedNoDuplicates()); 2827 } 2828 2829 int nof = array->number_of_entries(); 2830 if (nof == 0) return T::kNotFound; 2831 2832 // Fast case: do linear search for small arrays. 2833 const int kMaxElementsForLinearSearch = 8; 2834 if ((search_mode == ALL_ENTRIES && 2835 nof <= kMaxElementsForLinearSearch) || 2836 (search_mode == VALID_ENTRIES && 2837 valid_entries <= (kMaxElementsForLinearSearch * 3))) { 2838 return LinearSearch<search_mode>(array, name, nof, valid_entries); 2839 } 2840 2841 // Slow case: perform binary search. 2842 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries); 2843} 2844 2845 2846int DescriptorArray::Search(Name* name, int valid_descriptors) { 2847 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors); 2848} 2849 2850 2851int DescriptorArray::SearchWithCache(Name* name, Map* map) { 2852 int number_of_own_descriptors = map->NumberOfOwnDescriptors(); 2853 if (number_of_own_descriptors == 0) return kNotFound; 2854 2855 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache(); 2856 int number = cache->Lookup(map, name); 2857 2858 if (number == DescriptorLookupCache::kAbsent) { 2859 number = Search(name, number_of_own_descriptors); 2860 cache->Update(map, name, number); 2861 } 2862 2863 return number; 2864} 2865 2866 2867PropertyDetails Map::GetLastDescriptorDetails() { 2868 return instance_descriptors()->GetDetails(LastAdded()); 2869} 2870 2871 2872void Map::LookupDescriptor(JSObject* holder, 2873 Name* name, 2874 LookupResult* result) { 2875 DescriptorArray* descriptors = this->instance_descriptors(); 2876 int number = descriptors->SearchWithCache(name, this); 2877 if (number == DescriptorArray::kNotFound) return result->NotFound(); 2878 result->DescriptorResult(holder, descriptors->GetDetails(number), number); 2879} 2880 2881 2882void Map::LookupTransition(JSObject* holder, 2883 Name* name, 2884 LookupResult* result) { 2885 int transition_index = this->SearchTransition(name); 2886 if (transition_index == TransitionArray::kNotFound) return result->NotFound(); 2887 result->TransitionResult(holder, this->GetTransition(transition_index)); 2888} 2889 2890 2891FixedArrayBase* Map::GetInitialElements() { 2892 if (has_fast_smi_or_object_elements() || 2893 has_fast_double_elements()) { 2894 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); 2895 return GetHeap()->empty_fixed_array(); 2896 } else if (has_external_array_elements()) { 2897 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this); 2898 DCHECK(!GetHeap()->InNewSpace(empty_array)); 2899 return empty_array; 2900 } else if (has_fixed_typed_array_elements()) { 2901 FixedTypedArrayBase* empty_array = 2902 GetHeap()->EmptyFixedTypedArrayForMap(this); 2903 DCHECK(!GetHeap()->InNewSpace(empty_array)); 2904 return empty_array; 2905 } else { 2906 UNREACHABLE(); 2907 } 2908 return NULL; 2909} 2910 2911 2912Object** DescriptorArray::GetKeySlot(int descriptor_number) { 2913 DCHECK(descriptor_number < number_of_descriptors()); 2914 return RawFieldOfElementAt(ToKeyIndex(descriptor_number)); 2915} 2916 2917 2918Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) { 2919 return GetKeySlot(descriptor_number); 2920} 2921 2922 2923Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) { 2924 return GetValueSlot(descriptor_number - 1) + 1; 2925} 2926 2927 2928Name* DescriptorArray::GetKey(int descriptor_number) { 2929 DCHECK(descriptor_number < number_of_descriptors()); 2930 return Name::cast(get(ToKeyIndex(descriptor_number))); 2931} 2932 2933 2934int DescriptorArray::GetSortedKeyIndex(int descriptor_number) { 2935 return GetDetails(descriptor_number).pointer(); 2936} 2937 2938 2939Name* DescriptorArray::GetSortedKey(int descriptor_number) { 2940 return GetKey(GetSortedKeyIndex(descriptor_number)); 2941} 2942 2943 2944void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) { 2945 PropertyDetails details = GetDetails(descriptor_index); 2946 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi()); 2947} 2948 2949 2950void DescriptorArray::SetRepresentation(int descriptor_index, 2951 Representation representation) { 2952 DCHECK(!representation.IsNone()); 2953 PropertyDetails details = GetDetails(descriptor_index); 2954 set(ToDetailsIndex(descriptor_index), 2955 details.CopyWithRepresentation(representation).AsSmi()); 2956} 2957 2958 2959Object** DescriptorArray::GetValueSlot(int descriptor_number) { 2960 DCHECK(descriptor_number < number_of_descriptors()); 2961 return RawFieldOfElementAt(ToValueIndex(descriptor_number)); 2962} 2963 2964 2965int DescriptorArray::GetValueOffset(int descriptor_number) { 2966 return OffsetOfElementAt(ToValueIndex(descriptor_number)); 2967} 2968 2969 2970Object* DescriptorArray::GetValue(int descriptor_number) { 2971 DCHECK(descriptor_number < number_of_descriptors()); 2972 return get(ToValueIndex(descriptor_number)); 2973} 2974 2975 2976void DescriptorArray::SetValue(int descriptor_index, Object* value) { 2977 set(ToValueIndex(descriptor_index), value); 2978} 2979 2980 2981PropertyDetails DescriptorArray::GetDetails(int descriptor_number) { 2982 DCHECK(descriptor_number < number_of_descriptors()); 2983 Object* details = get(ToDetailsIndex(descriptor_number)); 2984 return PropertyDetails(Smi::cast(details)); 2985} 2986 2987 2988PropertyType DescriptorArray::GetType(int descriptor_number) { 2989 return GetDetails(descriptor_number).type(); 2990} 2991 2992 2993int DescriptorArray::GetFieldIndex(int descriptor_number) { 2994 DCHECK(GetDetails(descriptor_number).type() == FIELD); 2995 return GetDetails(descriptor_number).field_index(); 2996} 2997 2998 2999HeapType* DescriptorArray::GetFieldType(int descriptor_number) { 3000 DCHECK(GetDetails(descriptor_number).type() == FIELD); 3001 return HeapType::cast(GetValue(descriptor_number)); 3002} 3003 3004 3005Object* DescriptorArray::GetConstant(int descriptor_number) { 3006 return GetValue(descriptor_number); 3007} 3008 3009 3010Object* DescriptorArray::GetCallbacksObject(int descriptor_number) { 3011 DCHECK(GetType(descriptor_number) == CALLBACKS); 3012 return GetValue(descriptor_number); 3013} 3014 3015 3016AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) { 3017 DCHECK(GetType(descriptor_number) == CALLBACKS); 3018 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number)); 3019 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address()); 3020} 3021 3022 3023void DescriptorArray::Get(int descriptor_number, Descriptor* desc) { 3024 desc->Init(handle(GetKey(descriptor_number), GetIsolate()), 3025 handle(GetValue(descriptor_number), GetIsolate()), 3026 GetDetails(descriptor_number)); 3027} 3028 3029 3030void DescriptorArray::Set(int descriptor_number, 3031 Descriptor* desc, 3032 const WhitenessWitness&) { 3033 // Range check. 3034 DCHECK(descriptor_number < number_of_descriptors()); 3035 3036 NoIncrementalWriteBarrierSet(this, 3037 ToKeyIndex(descriptor_number), 3038 *desc->GetKey()); 3039 NoIncrementalWriteBarrierSet(this, 3040 ToValueIndex(descriptor_number), 3041 *desc->GetValue()); 3042 NoIncrementalWriteBarrierSet(this, 3043 ToDetailsIndex(descriptor_number), 3044 desc->GetDetails().AsSmi()); 3045} 3046 3047 3048void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { 3049 // Range check. 3050 DCHECK(descriptor_number < number_of_descriptors()); 3051 3052 set(ToKeyIndex(descriptor_number), *desc->GetKey()); 3053 set(ToValueIndex(descriptor_number), *desc->GetValue()); 3054 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi()); 3055} 3056 3057 3058void DescriptorArray::Append(Descriptor* desc) { 3059 DisallowHeapAllocation no_gc; 3060 int descriptor_number = number_of_descriptors(); 3061 SetNumberOfDescriptors(descriptor_number + 1); 3062 Set(descriptor_number, desc); 3063 3064 uint32_t hash = desc->GetKey()->Hash(); 3065 3066 int insertion; 3067 3068 for (insertion = descriptor_number; insertion > 0; --insertion) { 3069 Name* key = GetSortedKey(insertion - 1); 3070 if (key->Hash() <= hash) break; 3071 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1)); 3072 } 3073 3074 SetSortedKey(insertion, descriptor_number); 3075} 3076 3077 3078void DescriptorArray::SwapSortedKeys(int first, int second) { 3079 int first_key = GetSortedKeyIndex(first); 3080 SetSortedKey(first, GetSortedKeyIndex(second)); 3081 SetSortedKey(second, first_key); 3082} 3083 3084 3085DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array) 3086 : marking_(array->GetHeap()->incremental_marking()) { 3087 marking_->EnterNoMarkingScope(); 3088 DCHECK(!marking_->IsMarking() || 3089 Marking::Color(array) == Marking::WHITE_OBJECT); 3090} 3091 3092 3093DescriptorArray::WhitenessWitness::~WhitenessWitness() { 3094 marking_->LeaveNoMarkingScope(); 3095} 3096 3097 3098template<typename Derived, typename Shape, typename Key> 3099int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { 3100 const int kMinCapacity = 32; 3101 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2); 3102 if (capacity < kMinCapacity) { 3103 capacity = kMinCapacity; // Guarantee min capacity. 3104 } 3105 return capacity; 3106} 3107 3108 3109template<typename Derived, typename Shape, typename Key> 3110int HashTable<Derived, Shape, Key>::FindEntry(Key key) { 3111 return FindEntry(GetIsolate(), key); 3112} 3113 3114 3115// Find entry for key otherwise return kNotFound. 3116template<typename Derived, typename Shape, typename Key> 3117int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) { 3118 uint32_t capacity = Capacity(); 3119 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity); 3120 uint32_t count = 1; 3121 // EnsureCapacity will guarantee the hash table is never full. 3122 while (true) { 3123 Object* element = KeyAt(entry); 3124 // Empty entry. Uses raw unchecked accessors because it is called by the 3125 // string table during bootstrapping. 3126 if (element == isolate->heap()->raw_unchecked_undefined_value()) break; 3127 if (element != isolate->heap()->raw_unchecked_the_hole_value() && 3128 Shape::IsMatch(key, element)) return entry; 3129 entry = NextProbe(entry, count++, capacity); 3130 } 3131 return kNotFound; 3132} 3133 3134 3135bool SeededNumberDictionary::requires_slow_elements() { 3136 Object* max_index_object = get(kMaxNumberKeyIndex); 3137 if (!max_index_object->IsSmi()) return false; 3138 return 0 != 3139 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask); 3140} 3141 3142uint32_t SeededNumberDictionary::max_number_key() { 3143 DCHECK(!requires_slow_elements()); 3144 Object* max_index_object = get(kMaxNumberKeyIndex); 3145 if (!max_index_object->IsSmi()) return 0; 3146 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value()); 3147 return value >> kRequiresSlowElementsTagSize; 3148} 3149 3150void SeededNumberDictionary::set_requires_slow_elements() { 3151 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask)); 3152} 3153 3154 3155// ------------------------------------ 3156// Cast operations 3157 3158 3159CAST_ACCESSOR(AccessorInfo) 3160CAST_ACCESSOR(ByteArray) 3161CAST_ACCESSOR(Cell) 3162CAST_ACCESSOR(Code) 3163CAST_ACCESSOR(CodeCacheHashTable) 3164CAST_ACCESSOR(CompilationCacheTable) 3165CAST_ACCESSOR(ConsString) 3166CAST_ACCESSOR(ConstantPoolArray) 3167CAST_ACCESSOR(DeoptimizationInputData) 3168CAST_ACCESSOR(DeoptimizationOutputData) 3169CAST_ACCESSOR(DependentCode) 3170CAST_ACCESSOR(DescriptorArray) 3171CAST_ACCESSOR(ExternalArray) 3172CAST_ACCESSOR(ExternalOneByteString) 3173CAST_ACCESSOR(ExternalFloat32Array) 3174CAST_ACCESSOR(ExternalFloat64Array) 3175CAST_ACCESSOR(ExternalInt16Array) 3176CAST_ACCESSOR(ExternalInt32Array) 3177CAST_ACCESSOR(ExternalInt8Array) 3178CAST_ACCESSOR(ExternalString) 3179CAST_ACCESSOR(ExternalTwoByteString) 3180CAST_ACCESSOR(ExternalUint16Array) 3181CAST_ACCESSOR(ExternalUint32Array) 3182CAST_ACCESSOR(ExternalUint8Array) 3183CAST_ACCESSOR(ExternalUint8ClampedArray) 3184CAST_ACCESSOR(FixedArray) 3185CAST_ACCESSOR(FixedArrayBase) 3186CAST_ACCESSOR(FixedDoubleArray) 3187CAST_ACCESSOR(FixedTypedArrayBase) 3188CAST_ACCESSOR(Foreign) 3189CAST_ACCESSOR(FreeSpace) 3190CAST_ACCESSOR(GlobalObject) 3191CAST_ACCESSOR(HeapObject) 3192CAST_ACCESSOR(JSArray) 3193CAST_ACCESSOR(JSArrayBuffer) 3194CAST_ACCESSOR(JSArrayBufferView) 3195CAST_ACCESSOR(JSBuiltinsObject) 3196CAST_ACCESSOR(JSDataView) 3197CAST_ACCESSOR(JSDate) 3198CAST_ACCESSOR(JSFunction) 3199CAST_ACCESSOR(JSFunctionProxy) 3200CAST_ACCESSOR(JSFunctionResultCache) 3201CAST_ACCESSOR(JSGeneratorObject) 3202CAST_ACCESSOR(JSGlobalObject) 3203CAST_ACCESSOR(JSGlobalProxy) 3204CAST_ACCESSOR(JSMap) 3205CAST_ACCESSOR(JSMapIterator) 3206CAST_ACCESSOR(JSMessageObject) 3207CAST_ACCESSOR(JSModule) 3208CAST_ACCESSOR(JSObject) 3209CAST_ACCESSOR(JSProxy) 3210CAST_ACCESSOR(JSReceiver) 3211CAST_ACCESSOR(JSRegExp) 3212CAST_ACCESSOR(JSSet) 3213CAST_ACCESSOR(JSSetIterator) 3214CAST_ACCESSOR(JSTypedArray) 3215CAST_ACCESSOR(JSValue) 3216CAST_ACCESSOR(JSWeakMap) 3217CAST_ACCESSOR(JSWeakSet) 3218CAST_ACCESSOR(Map) 3219CAST_ACCESSOR(MapCache) 3220CAST_ACCESSOR(Name) 3221CAST_ACCESSOR(NameDictionary) 3222CAST_ACCESSOR(NormalizedMapCache) 3223CAST_ACCESSOR(Object) 3224CAST_ACCESSOR(ObjectHashTable) 3225CAST_ACCESSOR(Oddball) 3226CAST_ACCESSOR(OrderedHashMap) 3227CAST_ACCESSOR(OrderedHashSet) 3228CAST_ACCESSOR(PolymorphicCodeCacheHashTable) 3229CAST_ACCESSOR(PropertyCell) 3230CAST_ACCESSOR(ScopeInfo) 3231CAST_ACCESSOR(SeededNumberDictionary) 3232CAST_ACCESSOR(SeqOneByteString) 3233CAST_ACCESSOR(SeqString) 3234CAST_ACCESSOR(SeqTwoByteString) 3235CAST_ACCESSOR(SharedFunctionInfo) 3236CAST_ACCESSOR(SlicedString) 3237CAST_ACCESSOR(Smi) 3238CAST_ACCESSOR(String) 3239CAST_ACCESSOR(StringTable) 3240CAST_ACCESSOR(Struct) 3241CAST_ACCESSOR(Symbol) 3242CAST_ACCESSOR(UnseededNumberDictionary) 3243CAST_ACCESSOR(WeakHashTable) 3244 3245 3246template <class Traits> 3247FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) { 3248 SLOW_DCHECK(object->IsHeapObject() && 3249 HeapObject::cast(object)->map()->instance_type() == 3250 Traits::kInstanceType); 3251 return reinterpret_cast<FixedTypedArray<Traits>*>(object); 3252} 3253 3254 3255template <class Traits> 3256const FixedTypedArray<Traits>* 3257FixedTypedArray<Traits>::cast(const Object* object) { 3258 SLOW_DCHECK(object->IsHeapObject() && 3259 HeapObject::cast(object)->map()->instance_type() == 3260 Traits::kInstanceType); 3261 return reinterpret_cast<FixedTypedArray<Traits>*>(object); 3262} 3263 3264 3265#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name) 3266 STRUCT_LIST(MAKE_STRUCT_CAST) 3267#undef MAKE_STRUCT_CAST 3268 3269 3270template <typename Derived, typename Shape, typename Key> 3271HashTable<Derived, Shape, Key>* 3272HashTable<Derived, Shape, Key>::cast(Object* obj) { 3273 SLOW_DCHECK(obj->IsHashTable()); 3274 return reinterpret_cast<HashTable*>(obj); 3275} 3276 3277 3278template <typename Derived, typename Shape, typename Key> 3279const HashTable<Derived, Shape, Key>* 3280HashTable<Derived, Shape, Key>::cast(const Object* obj) { 3281 SLOW_DCHECK(obj->IsHashTable()); 3282 return reinterpret_cast<const HashTable*>(obj); 3283} 3284 3285 3286SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) 3287SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) 3288 3289SMI_ACCESSORS(FreeSpace, size, kSizeOffset) 3290NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset) 3291 3292SMI_ACCESSORS(String, length, kLengthOffset) 3293SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset) 3294 3295 3296uint32_t Name::hash_field() { 3297 return READ_UINT32_FIELD(this, kHashFieldOffset); 3298} 3299 3300 3301void Name::set_hash_field(uint32_t value) { 3302 WRITE_UINT32_FIELD(this, kHashFieldOffset, value); 3303#if V8_HOST_ARCH_64_BIT 3304 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0); 3305#endif 3306} 3307 3308 3309bool Name::Equals(Name* other) { 3310 if (other == this) return true; 3311 if ((this->IsInternalizedString() && other->IsInternalizedString()) || 3312 this->IsSymbol() || other->IsSymbol()) { 3313 return false; 3314 } 3315 return String::cast(this)->SlowEquals(String::cast(other)); 3316} 3317 3318 3319bool Name::Equals(Handle<Name> one, Handle<Name> two) { 3320 if (one.is_identical_to(two)) return true; 3321 if ((one->IsInternalizedString() && two->IsInternalizedString()) || 3322 one->IsSymbol() || two->IsSymbol()) { 3323 return false; 3324 } 3325 return String::SlowEquals(Handle<String>::cast(one), 3326 Handle<String>::cast(two)); 3327} 3328 3329 3330ACCESSORS(Symbol, name, Object, kNameOffset) 3331ACCESSORS(Symbol, flags, Smi, kFlagsOffset) 3332BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit) 3333BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit) 3334 3335 3336bool String::Equals(String* other) { 3337 if (other == this) return true; 3338 if (this->IsInternalizedString() && other->IsInternalizedString()) { 3339 return false; 3340 } 3341 return SlowEquals(other); 3342} 3343 3344 3345bool String::Equals(Handle<String> one, Handle<String> two) { 3346 if (one.is_identical_to(two)) return true; 3347 if (one->IsInternalizedString() && two->IsInternalizedString()) { 3348 return false; 3349 } 3350 return SlowEquals(one, two); 3351} 3352 3353 3354Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) { 3355 if (!string->IsConsString()) return string; 3356 Handle<ConsString> cons = Handle<ConsString>::cast(string); 3357 if (cons->IsFlat()) return handle(cons->first()); 3358 return SlowFlatten(cons, pretenure); 3359} 3360 3361 3362uint16_t String::Get(int index) { 3363 DCHECK(index >= 0 && index < length()); 3364 switch (StringShape(this).full_representation_tag()) { 3365 case kSeqStringTag | kOneByteStringTag: 3366 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index); 3367 case kSeqStringTag | kTwoByteStringTag: 3368 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index); 3369 case kConsStringTag | kOneByteStringTag: 3370 case kConsStringTag | kTwoByteStringTag: 3371 return ConsString::cast(this)->ConsStringGet(index); 3372 case kExternalStringTag | kOneByteStringTag: 3373 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index); 3374 case kExternalStringTag | kTwoByteStringTag: 3375 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index); 3376 case kSlicedStringTag | kOneByteStringTag: 3377 case kSlicedStringTag | kTwoByteStringTag: 3378 return SlicedString::cast(this)->SlicedStringGet(index); 3379 default: 3380 break; 3381 } 3382 3383 UNREACHABLE(); 3384 return 0; 3385} 3386 3387 3388void String::Set(int index, uint16_t value) { 3389 DCHECK(index >= 0 && index < length()); 3390 DCHECK(StringShape(this).IsSequential()); 3391 3392 return this->IsOneByteRepresentation() 3393 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value) 3394 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value); 3395} 3396 3397 3398bool String::IsFlat() { 3399 if (!StringShape(this).IsCons()) return true; 3400 return ConsString::cast(this)->second()->length() == 0; 3401} 3402 3403 3404String* String::GetUnderlying() { 3405 // Giving direct access to underlying string only makes sense if the 3406 // wrapping string is already flattened. 3407 DCHECK(this->IsFlat()); 3408 DCHECK(StringShape(this).IsIndirect()); 3409 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset); 3410 const int kUnderlyingOffset = SlicedString::kParentOffset; 3411 return String::cast(READ_FIELD(this, kUnderlyingOffset)); 3412} 3413 3414 3415template<class Visitor> 3416ConsString* String::VisitFlat(Visitor* visitor, 3417 String* string, 3418 const int offset) { 3419 int slice_offset = offset; 3420 const int length = string->length(); 3421 DCHECK(offset <= length); 3422 while (true) { 3423 int32_t type = string->map()->instance_type(); 3424 switch (type & (kStringRepresentationMask | kStringEncodingMask)) { 3425 case kSeqStringTag | kOneByteStringTag: 3426 visitor->VisitOneByteString( 3427 SeqOneByteString::cast(string)->GetChars() + slice_offset, 3428 length - offset); 3429 return NULL; 3430 3431 case kSeqStringTag | kTwoByteStringTag: 3432 visitor->VisitTwoByteString( 3433 SeqTwoByteString::cast(string)->GetChars() + slice_offset, 3434 length - offset); 3435 return NULL; 3436 3437 case kExternalStringTag | kOneByteStringTag: 3438 visitor->VisitOneByteString( 3439 ExternalOneByteString::cast(string)->GetChars() + slice_offset, 3440 length - offset); 3441 return NULL; 3442 3443 case kExternalStringTag | kTwoByteStringTag: 3444 visitor->VisitTwoByteString( 3445 ExternalTwoByteString::cast(string)->GetChars() + slice_offset, 3446 length - offset); 3447 return NULL; 3448 3449 case kSlicedStringTag | kOneByteStringTag: 3450 case kSlicedStringTag | kTwoByteStringTag: { 3451 SlicedString* slicedString = SlicedString::cast(string); 3452 slice_offset += slicedString->offset(); 3453 string = slicedString->parent(); 3454 continue; 3455 } 3456 3457 case kConsStringTag | kOneByteStringTag: 3458 case kConsStringTag | kTwoByteStringTag: 3459 return ConsString::cast(string); 3460 3461 default: 3462 UNREACHABLE(); 3463 return NULL; 3464 } 3465 } 3466} 3467 3468 3469uint16_t SeqOneByteString::SeqOneByteStringGet(int index) { 3470 DCHECK(index >= 0 && index < length()); 3471 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); 3472} 3473 3474 3475void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) { 3476 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode); 3477 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, 3478 static_cast<byte>(value)); 3479} 3480 3481 3482Address SeqOneByteString::GetCharsAddress() { 3483 return FIELD_ADDR(this, kHeaderSize); 3484} 3485 3486 3487uint8_t* SeqOneByteString::GetChars() { 3488 return reinterpret_cast<uint8_t*>(GetCharsAddress()); 3489} 3490 3491 3492Address SeqTwoByteString::GetCharsAddress() { 3493 return FIELD_ADDR(this, kHeaderSize); 3494} 3495 3496 3497uc16* SeqTwoByteString::GetChars() { 3498 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize)); 3499} 3500 3501 3502uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) { 3503 DCHECK(index >= 0 && index < length()); 3504 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize); 3505} 3506 3507 3508void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) { 3509 DCHECK(index >= 0 && index < length()); 3510 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value); 3511} 3512 3513 3514int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) { 3515 return SizeFor(length()); 3516} 3517 3518 3519int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) { 3520 return SizeFor(length()); 3521} 3522 3523 3524String* SlicedString::parent() { 3525 return String::cast(READ_FIELD(this, kParentOffset)); 3526} 3527 3528 3529void SlicedString::set_parent(String* parent, WriteBarrierMode mode) { 3530 DCHECK(parent->IsSeqString() || parent->IsExternalString()); 3531 WRITE_FIELD(this, kParentOffset, parent); 3532 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode); 3533} 3534 3535 3536SMI_ACCESSORS(SlicedString, offset, kOffsetOffset) 3537 3538 3539String* ConsString::first() { 3540 return String::cast(READ_FIELD(this, kFirstOffset)); 3541} 3542 3543 3544Object* ConsString::unchecked_first() { 3545 return READ_FIELD(this, kFirstOffset); 3546} 3547 3548 3549void ConsString::set_first(String* value, WriteBarrierMode mode) { 3550 WRITE_FIELD(this, kFirstOffset, value); 3551 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode); 3552} 3553 3554 3555String* ConsString::second() { 3556 return String::cast(READ_FIELD(this, kSecondOffset)); 3557} 3558 3559 3560Object* ConsString::unchecked_second() { 3561 return READ_FIELD(this, kSecondOffset); 3562} 3563 3564 3565void ConsString::set_second(String* value, WriteBarrierMode mode) { 3566 WRITE_FIELD(this, kSecondOffset, value); 3567 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode); 3568} 3569 3570 3571bool ExternalString::is_short() { 3572 InstanceType type = map()->instance_type(); 3573 return (type & kShortExternalStringMask) == kShortExternalStringTag; 3574} 3575 3576 3577const ExternalOneByteString::Resource* ExternalOneByteString::resource() { 3578 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); 3579} 3580 3581 3582void ExternalOneByteString::update_data_cache() { 3583 if (is_short()) return; 3584 const char** data_field = 3585 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset)); 3586 *data_field = resource()->data(); 3587} 3588 3589 3590void ExternalOneByteString::set_resource( 3591 const ExternalOneByteString::Resource* resource) { 3592 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize)); 3593 *reinterpret_cast<const Resource**>( 3594 FIELD_ADDR(this, kResourceOffset)) = resource; 3595 if (resource != NULL) update_data_cache(); 3596} 3597 3598 3599const uint8_t* ExternalOneByteString::GetChars() { 3600 return reinterpret_cast<const uint8_t*>(resource()->data()); 3601} 3602 3603 3604uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) { 3605 DCHECK(index >= 0 && index < length()); 3606 return GetChars()[index]; 3607} 3608 3609 3610const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() { 3611 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); 3612} 3613 3614 3615void ExternalTwoByteString::update_data_cache() { 3616 if (is_short()) return; 3617 const uint16_t** data_field = 3618 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset)); 3619 *data_field = resource()->data(); 3620} 3621 3622 3623void ExternalTwoByteString::set_resource( 3624 const ExternalTwoByteString::Resource* resource) { 3625 *reinterpret_cast<const Resource**>( 3626 FIELD_ADDR(this, kResourceOffset)) = resource; 3627 if (resource != NULL) update_data_cache(); 3628} 3629 3630 3631const uint16_t* ExternalTwoByteString::GetChars() { 3632 return resource()->data(); 3633} 3634 3635 3636uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) { 3637 DCHECK(index >= 0 && index < length()); 3638 return GetChars()[index]; 3639} 3640 3641 3642const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData( 3643 unsigned start) { 3644 return GetChars() + start; 3645} 3646 3647 3648int ConsStringIteratorOp::OffsetForDepth(int depth) { 3649 return depth & kDepthMask; 3650} 3651 3652 3653void ConsStringIteratorOp::PushLeft(ConsString* string) { 3654 frames_[depth_++ & kDepthMask] = string; 3655} 3656 3657 3658void ConsStringIteratorOp::PushRight(ConsString* string) { 3659 // Inplace update. 3660 frames_[(depth_-1) & kDepthMask] = string; 3661} 3662 3663 3664void ConsStringIteratorOp::AdjustMaximumDepth() { 3665 if (depth_ > maximum_depth_) maximum_depth_ = depth_; 3666} 3667 3668 3669void ConsStringIteratorOp::Pop() { 3670 DCHECK(depth_ > 0); 3671 DCHECK(depth_ <= maximum_depth_); 3672 depth_--; 3673} 3674 3675 3676uint16_t StringCharacterStream::GetNext() { 3677 DCHECK(buffer8_ != NULL && end_ != NULL); 3678 // Advance cursor if needed. 3679 if (buffer8_ == end_) HasMore(); 3680 DCHECK(buffer8_ < end_); 3681 return is_one_byte_ ? *buffer8_++ : *buffer16_++; 3682} 3683 3684 3685StringCharacterStream::StringCharacterStream(String* string, 3686 ConsStringIteratorOp* op, 3687 int offset) 3688 : is_one_byte_(false), 3689 op_(op) { 3690 Reset(string, offset); 3691} 3692 3693 3694void StringCharacterStream::Reset(String* string, int offset) { 3695 buffer8_ = NULL; 3696 end_ = NULL; 3697 ConsString* cons_string = String::VisitFlat(this, string, offset); 3698 op_->Reset(cons_string, offset); 3699 if (cons_string != NULL) { 3700 string = op_->Next(&offset); 3701 if (string != NULL) String::VisitFlat(this, string, offset); 3702 } 3703} 3704 3705 3706bool StringCharacterStream::HasMore() { 3707 if (buffer8_ != end_) return true; 3708 int offset; 3709 String* string = op_->Next(&offset); 3710 DCHECK_EQ(offset, 0); 3711 if (string == NULL) return false; 3712 String::VisitFlat(this, string); 3713 DCHECK(buffer8_ != end_); 3714 return true; 3715} 3716 3717 3718void StringCharacterStream::VisitOneByteString( 3719 const uint8_t* chars, int length) { 3720 is_one_byte_ = true; 3721 buffer8_ = chars; 3722 end_ = chars + length; 3723} 3724 3725 3726void StringCharacterStream::VisitTwoByteString( 3727 const uint16_t* chars, int length) { 3728 is_one_byte_ = false; 3729 buffer16_ = chars; 3730 end_ = reinterpret_cast<const uint8_t*>(chars + length); 3731} 3732 3733 3734void JSFunctionResultCache::MakeZeroSize() { 3735 set_finger_index(kEntriesIndex); 3736 set_size(kEntriesIndex); 3737} 3738 3739 3740void JSFunctionResultCache::Clear() { 3741 int cache_size = size(); 3742 Object** entries_start = RawFieldOfElementAt(kEntriesIndex); 3743 MemsetPointer(entries_start, 3744 GetHeap()->the_hole_value(), 3745 cache_size - kEntriesIndex); 3746 MakeZeroSize(); 3747} 3748 3749 3750int JSFunctionResultCache::size() { 3751 return Smi::cast(get(kCacheSizeIndex))->value(); 3752} 3753 3754 3755void JSFunctionResultCache::set_size(int size) { 3756 set(kCacheSizeIndex, Smi::FromInt(size)); 3757} 3758 3759 3760int JSFunctionResultCache::finger_index() { 3761 return Smi::cast(get(kFingerIndex))->value(); 3762} 3763 3764 3765void JSFunctionResultCache::set_finger_index(int finger_index) { 3766 set(kFingerIndex, Smi::FromInt(finger_index)); 3767} 3768 3769 3770byte ByteArray::get(int index) { 3771 DCHECK(index >= 0 && index < this->length()); 3772 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); 3773} 3774 3775 3776void ByteArray::set(int index, byte value) { 3777 DCHECK(index >= 0 && index < this->length()); 3778 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value); 3779} 3780 3781 3782int ByteArray::get_int(int index) { 3783 DCHECK(index >= 0 && (index * kIntSize) < this->length()); 3784 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize); 3785} 3786 3787 3788ByteArray* ByteArray::FromDataStartAddress(Address address) { 3789 DCHECK_TAG_ALIGNED(address); 3790 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag); 3791} 3792 3793 3794Address ByteArray::GetDataStartAddress() { 3795 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize; 3796} 3797 3798 3799uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() { 3800 return reinterpret_cast<uint8_t*>(external_pointer()); 3801} 3802 3803 3804uint8_t ExternalUint8ClampedArray::get_scalar(int index) { 3805 DCHECK((index >= 0) && (index < this->length())); 3806 uint8_t* ptr = external_uint8_clamped_pointer(); 3807 return ptr[index]; 3808} 3809 3810 3811Handle<Object> ExternalUint8ClampedArray::get( 3812 Handle<ExternalUint8ClampedArray> array, 3813 int index) { 3814 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), 3815 array->GetIsolate()); 3816} 3817 3818 3819void ExternalUint8ClampedArray::set(int index, uint8_t value) { 3820 DCHECK((index >= 0) && (index < this->length())); 3821 uint8_t* ptr = external_uint8_clamped_pointer(); 3822 ptr[index] = value; 3823} 3824 3825 3826void* ExternalArray::external_pointer() const { 3827 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset); 3828 return reinterpret_cast<void*>(ptr); 3829} 3830 3831 3832void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) { 3833 intptr_t ptr = reinterpret_cast<intptr_t>(value); 3834 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr); 3835} 3836 3837 3838int8_t ExternalInt8Array::get_scalar(int index) { 3839 DCHECK((index >= 0) && (index < this->length())); 3840 int8_t* ptr = static_cast<int8_t*>(external_pointer()); 3841 return ptr[index]; 3842} 3843 3844 3845Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array, 3846 int index) { 3847 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), 3848 array->GetIsolate()); 3849} 3850 3851 3852void ExternalInt8Array::set(int index, int8_t value) { 3853 DCHECK((index >= 0) && (index < this->length())); 3854 int8_t* ptr = static_cast<int8_t*>(external_pointer()); 3855 ptr[index] = value; 3856} 3857 3858 3859uint8_t ExternalUint8Array::get_scalar(int index) { 3860 DCHECK((index >= 0) && (index < this->length())); 3861 uint8_t* ptr = static_cast<uint8_t*>(external_pointer()); 3862 return ptr[index]; 3863} 3864 3865 3866Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array, 3867 int index) { 3868 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), 3869 array->GetIsolate()); 3870} 3871 3872 3873void ExternalUint8Array::set(int index, uint8_t value) { 3874 DCHECK((index >= 0) && (index < this->length())); 3875 uint8_t* ptr = static_cast<uint8_t*>(external_pointer()); 3876 ptr[index] = value; 3877} 3878 3879 3880int16_t ExternalInt16Array::get_scalar(int index) { 3881 DCHECK((index >= 0) && (index < this->length())); 3882 int16_t* ptr = static_cast<int16_t*>(external_pointer()); 3883 return ptr[index]; 3884} 3885 3886 3887Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array, 3888 int index) { 3889 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), 3890 array->GetIsolate()); 3891} 3892 3893 3894void ExternalInt16Array::set(int index, int16_t value) { 3895 DCHECK((index >= 0) && (index < this->length())); 3896 int16_t* ptr = static_cast<int16_t*>(external_pointer()); 3897 ptr[index] = value; 3898} 3899 3900 3901uint16_t ExternalUint16Array::get_scalar(int index) { 3902 DCHECK((index >= 0) && (index < this->length())); 3903 uint16_t* ptr = static_cast<uint16_t*>(external_pointer()); 3904 return ptr[index]; 3905} 3906 3907 3908Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array, 3909 int index) { 3910 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)), 3911 array->GetIsolate()); 3912} 3913 3914 3915void ExternalUint16Array::set(int index, uint16_t value) { 3916 DCHECK((index >= 0) && (index < this->length())); 3917 uint16_t* ptr = static_cast<uint16_t*>(external_pointer()); 3918 ptr[index] = value; 3919} 3920 3921 3922int32_t ExternalInt32Array::get_scalar(int index) { 3923 DCHECK((index >= 0) && (index < this->length())); 3924 int32_t* ptr = static_cast<int32_t*>(external_pointer()); 3925 return ptr[index]; 3926} 3927 3928 3929Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array, 3930 int index) { 3931 return array->GetIsolate()->factory()-> 3932 NewNumberFromInt(array->get_scalar(index)); 3933} 3934 3935 3936void ExternalInt32Array::set(int index, int32_t value) { 3937 DCHECK((index >= 0) && (index < this->length())); 3938 int32_t* ptr = static_cast<int32_t*>(external_pointer()); 3939 ptr[index] = value; 3940} 3941 3942 3943uint32_t ExternalUint32Array::get_scalar(int index) { 3944 DCHECK((index >= 0) && (index < this->length())); 3945 uint32_t* ptr = static_cast<uint32_t*>(external_pointer()); 3946 return ptr[index]; 3947} 3948 3949 3950Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array, 3951 int index) { 3952 return array->GetIsolate()->factory()-> 3953 NewNumberFromUint(array->get_scalar(index)); 3954} 3955 3956 3957void ExternalUint32Array::set(int index, uint32_t value) { 3958 DCHECK((index >= 0) && (index < this->length())); 3959 uint32_t* ptr = static_cast<uint32_t*>(external_pointer()); 3960 ptr[index] = value; 3961} 3962 3963 3964float ExternalFloat32Array::get_scalar(int index) { 3965 DCHECK((index >= 0) && (index < this->length())); 3966 float* ptr = static_cast<float*>(external_pointer()); 3967 return ptr[index]; 3968} 3969 3970 3971Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array, 3972 int index) { 3973 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index)); 3974} 3975 3976 3977void ExternalFloat32Array::set(int index, float value) { 3978 DCHECK((index >= 0) && (index < this->length())); 3979 float* ptr = static_cast<float*>(external_pointer()); 3980 ptr[index] = value; 3981} 3982 3983 3984double ExternalFloat64Array::get_scalar(int index) { 3985 DCHECK((index >= 0) && (index < this->length())); 3986 double* ptr = static_cast<double*>(external_pointer()); 3987 return ptr[index]; 3988} 3989 3990 3991Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array, 3992 int index) { 3993 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index)); 3994} 3995 3996 3997void ExternalFloat64Array::set(int index, double value) { 3998 DCHECK((index >= 0) && (index < this->length())); 3999 double* ptr = static_cast<double*>(external_pointer()); 4000 ptr[index] = value; 4001} 4002 4003 4004void* FixedTypedArrayBase::DataPtr() { 4005 return FIELD_ADDR(this, kDataOffset); 4006} 4007 4008 4009int FixedTypedArrayBase::DataSize(InstanceType type) { 4010 int element_size; 4011 switch (type) { 4012#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ 4013 case FIXED_##TYPE##_ARRAY_TYPE: \ 4014 element_size = size; \ 4015 break; 4016 4017 TYPED_ARRAYS(TYPED_ARRAY_CASE) 4018#undef TYPED_ARRAY_CASE 4019 default: 4020 UNREACHABLE(); 4021 return 0; 4022 } 4023 return length() * element_size; 4024} 4025 4026 4027int FixedTypedArrayBase::DataSize() { 4028 return DataSize(map()->instance_type()); 4029} 4030 4031 4032int FixedTypedArrayBase::size() { 4033 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize()); 4034} 4035 4036 4037int FixedTypedArrayBase::TypedArraySize(InstanceType type) { 4038 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type)); 4039} 4040 4041 4042uint8_t Uint8ArrayTraits::defaultValue() { return 0; } 4043 4044 4045uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; } 4046 4047 4048int8_t Int8ArrayTraits::defaultValue() { return 0; } 4049 4050 4051uint16_t Uint16ArrayTraits::defaultValue() { return 0; } 4052 4053 4054int16_t Int16ArrayTraits::defaultValue() { return 0; } 4055 4056 4057uint32_t Uint32ArrayTraits::defaultValue() { return 0; } 4058 4059 4060int32_t Int32ArrayTraits::defaultValue() { return 0; } 4061 4062 4063float Float32ArrayTraits::defaultValue() { 4064 return static_cast<float>(base::OS::nan_value()); 4065} 4066 4067 4068double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); } 4069 4070 4071template <class Traits> 4072typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) { 4073 DCHECK((index >= 0) && (index < this->length())); 4074 ElementType* ptr = reinterpret_cast<ElementType*>( 4075 FIELD_ADDR(this, kDataOffset)); 4076 return ptr[index]; 4077} 4078 4079 4080template<> inline 4081FixedTypedArray<Float64ArrayTraits>::ElementType 4082 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) { 4083 DCHECK((index >= 0) && (index < this->length())); 4084 return READ_DOUBLE_FIELD(this, ElementOffset(index)); 4085} 4086 4087 4088template <class Traits> 4089void FixedTypedArray<Traits>::set(int index, ElementType value) { 4090 DCHECK((index >= 0) && (index < this->length())); 4091 ElementType* ptr = reinterpret_cast<ElementType*>( 4092 FIELD_ADDR(this, kDataOffset)); 4093 ptr[index] = value; 4094} 4095 4096 4097template<> inline 4098void FixedTypedArray<Float64ArrayTraits>::set( 4099 int index, Float64ArrayTraits::ElementType value) { 4100 DCHECK((index >= 0) && (index < this->length())); 4101 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value); 4102} 4103 4104 4105template <class Traits> 4106typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) { 4107 return static_cast<ElementType>(value); 4108} 4109 4110 4111template <> inline 4112uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) { 4113 if (value < 0) return 0; 4114 if (value > 0xFF) return 0xFF; 4115 return static_cast<uint8_t>(value); 4116} 4117 4118 4119template <class Traits> 4120typename Traits::ElementType FixedTypedArray<Traits>::from_double( 4121 double value) { 4122 return static_cast<ElementType>(DoubleToInt32(value)); 4123} 4124 4125 4126template<> inline 4127uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) { 4128 if (value < 0) return 0; 4129 if (value > 0xFF) return 0xFF; 4130 return static_cast<uint8_t>(lrint(value)); 4131} 4132 4133 4134template<> inline 4135float FixedTypedArray<Float32ArrayTraits>::from_double(double value) { 4136 return static_cast<float>(value); 4137} 4138 4139 4140template<> inline 4141double FixedTypedArray<Float64ArrayTraits>::from_double(double value) { 4142 return value; 4143} 4144 4145 4146template <class Traits> 4147Handle<Object> FixedTypedArray<Traits>::get( 4148 Handle<FixedTypedArray<Traits> > array, 4149 int index) { 4150 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index)); 4151} 4152 4153 4154template <class Traits> 4155Handle<Object> FixedTypedArray<Traits>::SetValue( 4156 Handle<FixedTypedArray<Traits> > array, 4157 uint32_t index, 4158 Handle<Object> value) { 4159 ElementType cast_value = Traits::defaultValue(); 4160 if (index < static_cast<uint32_t>(array->length())) { 4161 if (value->IsSmi()) { 4162 int int_value = Handle<Smi>::cast(value)->value(); 4163 cast_value = from_int(int_value); 4164 } else if (value->IsHeapNumber()) { 4165 double double_value = Handle<HeapNumber>::cast(value)->value(); 4166 cast_value = from_double(double_value); 4167 } else { 4168 // Clamp undefined to the default value. All other types have been 4169 // converted to a number type further up in the call chain. 4170 DCHECK(value->IsUndefined()); 4171 } 4172 array->set(index, cast_value); 4173 } 4174 return Traits::ToHandle(array->GetIsolate(), cast_value); 4175} 4176 4177 4178Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) { 4179 return handle(Smi::FromInt(scalar), isolate); 4180} 4181 4182 4183Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate, 4184 uint8_t scalar) { 4185 return handle(Smi::FromInt(scalar), isolate); 4186} 4187 4188 4189Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) { 4190 return handle(Smi::FromInt(scalar), isolate); 4191} 4192 4193 4194Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) { 4195 return handle(Smi::FromInt(scalar), isolate); 4196} 4197 4198 4199Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) { 4200 return handle(Smi::FromInt(scalar), isolate); 4201} 4202 4203 4204Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) { 4205 return isolate->factory()->NewNumberFromUint(scalar); 4206} 4207 4208 4209Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) { 4210 return isolate->factory()->NewNumberFromInt(scalar); 4211} 4212 4213 4214Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) { 4215 return isolate->factory()->NewNumber(scalar); 4216} 4217 4218 4219Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) { 4220 return isolate->factory()->NewNumber(scalar); 4221} 4222 4223 4224int Map::visitor_id() { 4225 return READ_BYTE_FIELD(this, kVisitorIdOffset); 4226} 4227 4228 4229void Map::set_visitor_id(int id) { 4230 DCHECK(0 <= id && id < 256); 4231 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id)); 4232} 4233 4234 4235int Map::instance_size() { 4236 return NOBARRIER_READ_BYTE_FIELD( 4237 this, kInstanceSizeOffset) << kPointerSizeLog2; 4238} 4239 4240 4241int Map::inobject_properties() { 4242 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset); 4243} 4244 4245 4246int Map::pre_allocated_property_fields() { 4247 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset); 4248} 4249 4250 4251int Map::GetInObjectPropertyOffset(int index) { 4252 // Adjust for the number of properties stored in the object. 4253 index -= inobject_properties(); 4254 DCHECK(index <= 0); 4255 return instance_size() + (index * kPointerSize); 4256} 4257 4258 4259int HeapObject::SizeFromMap(Map* map) { 4260 int instance_size = map->instance_size(); 4261 if (instance_size != kVariableSizeSentinel) return instance_size; 4262 // Only inline the most frequent cases. 4263 InstanceType instance_type = map->instance_type(); 4264 if (instance_type == FIXED_ARRAY_TYPE) { 4265 return FixedArray::BodyDescriptor::SizeOf(map, this); 4266 } 4267 if (instance_type == ONE_BYTE_STRING_TYPE || 4268 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) { 4269 return SeqOneByteString::SizeFor( 4270 reinterpret_cast<SeqOneByteString*>(this)->length()); 4271 } 4272 if (instance_type == BYTE_ARRAY_TYPE) { 4273 return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); 4274 } 4275 if (instance_type == FREE_SPACE_TYPE) { 4276 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size(); 4277 } 4278 if (instance_type == STRING_TYPE || 4279 instance_type == INTERNALIZED_STRING_TYPE) { 4280 return SeqTwoByteString::SizeFor( 4281 reinterpret_cast<SeqTwoByteString*>(this)->length()); 4282 } 4283 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) { 4284 return FixedDoubleArray::SizeFor( 4285 reinterpret_cast<FixedDoubleArray*>(this)->length()); 4286 } 4287 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) { 4288 return reinterpret_cast<ConstantPoolArray*>(this)->size(); 4289 } 4290 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE && 4291 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) { 4292 return reinterpret_cast<FixedTypedArrayBase*>( 4293 this)->TypedArraySize(instance_type); 4294 } 4295 DCHECK(instance_type == CODE_TYPE); 4296 return reinterpret_cast<Code*>(this)->CodeSize(); 4297} 4298 4299 4300void Map::set_instance_size(int value) { 4301 DCHECK_EQ(0, value & (kPointerSize - 1)); 4302 value >>= kPointerSizeLog2; 4303 DCHECK(0 <= value && value < 256); 4304 NOBARRIER_WRITE_BYTE_FIELD( 4305 this, kInstanceSizeOffset, static_cast<byte>(value)); 4306} 4307 4308 4309void Map::set_inobject_properties(int value) { 4310 DCHECK(0 <= value && value < 256); 4311 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value)); 4312} 4313 4314 4315void Map::set_pre_allocated_property_fields(int value) { 4316 DCHECK(0 <= value && value < 256); 4317 WRITE_BYTE_FIELD(this, 4318 kPreAllocatedPropertyFieldsOffset, 4319 static_cast<byte>(value)); 4320} 4321 4322 4323InstanceType Map::instance_type() { 4324 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset)); 4325} 4326 4327 4328void Map::set_instance_type(InstanceType value) { 4329 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value); 4330} 4331 4332 4333int Map::unused_property_fields() { 4334 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset); 4335} 4336 4337 4338void Map::set_unused_property_fields(int value) { 4339 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255)); 4340} 4341 4342 4343byte Map::bit_field() { 4344 return READ_BYTE_FIELD(this, kBitFieldOffset); 4345} 4346 4347 4348void Map::set_bit_field(byte value) { 4349 WRITE_BYTE_FIELD(this, kBitFieldOffset, value); 4350} 4351 4352 4353byte Map::bit_field2() { 4354 return READ_BYTE_FIELD(this, kBitField2Offset); 4355} 4356 4357 4358void Map::set_bit_field2(byte value) { 4359 WRITE_BYTE_FIELD(this, kBitField2Offset, value); 4360} 4361 4362 4363void Map::set_non_instance_prototype(bool value) { 4364 if (value) { 4365 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype)); 4366 } else { 4367 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype)); 4368 } 4369} 4370 4371 4372bool Map::has_non_instance_prototype() { 4373 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0; 4374} 4375 4376 4377void Map::set_function_with_prototype(bool value) { 4378 set_bit_field(FunctionWithPrototype::update(bit_field(), value)); 4379} 4380 4381 4382bool Map::function_with_prototype() { 4383 return FunctionWithPrototype::decode(bit_field()); 4384} 4385 4386 4387void Map::set_is_access_check_needed(bool access_check_needed) { 4388 if (access_check_needed) { 4389 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded)); 4390 } else { 4391 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded)); 4392 } 4393} 4394 4395 4396bool Map::is_access_check_needed() { 4397 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0; 4398} 4399 4400 4401void Map::set_is_extensible(bool value) { 4402 if (value) { 4403 set_bit_field2(bit_field2() | (1 << kIsExtensible)); 4404 } else { 4405 set_bit_field2(bit_field2() & ~(1 << kIsExtensible)); 4406 } 4407} 4408 4409bool Map::is_extensible() { 4410 return ((1 << kIsExtensible) & bit_field2()) != 0; 4411} 4412 4413 4414void Map::set_is_prototype_map(bool value) { 4415 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value)); 4416} 4417 4418bool Map::is_prototype_map() { 4419 return IsPrototypeMapBits::decode(bit_field2()); 4420} 4421 4422 4423void Map::set_dictionary_map(bool value) { 4424 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value); 4425 new_bit_field3 = IsUnstable::update(new_bit_field3, value); 4426 set_bit_field3(new_bit_field3); 4427} 4428 4429 4430bool Map::is_dictionary_map() { 4431 return DictionaryMap::decode(bit_field3()); 4432} 4433 4434 4435Code::Flags Code::flags() { 4436 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset)); 4437} 4438 4439 4440void Map::set_owns_descriptors(bool owns_descriptors) { 4441 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors)); 4442} 4443 4444 4445bool Map::owns_descriptors() { 4446 return OwnsDescriptors::decode(bit_field3()); 4447} 4448 4449 4450void Map::set_has_instance_call_handler() { 4451 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true)); 4452} 4453 4454 4455bool Map::has_instance_call_handler() { 4456 return HasInstanceCallHandler::decode(bit_field3()); 4457} 4458 4459 4460void Map::deprecate() { 4461 set_bit_field3(Deprecated::update(bit_field3(), true)); 4462} 4463 4464 4465bool Map::is_deprecated() { 4466 return Deprecated::decode(bit_field3()); 4467} 4468 4469 4470void Map::set_migration_target(bool value) { 4471 set_bit_field3(IsMigrationTarget::update(bit_field3(), value)); 4472} 4473 4474 4475bool Map::is_migration_target() { 4476 return IsMigrationTarget::decode(bit_field3()); 4477} 4478 4479 4480void Map::set_done_inobject_slack_tracking(bool value) { 4481 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value)); 4482} 4483 4484 4485bool Map::done_inobject_slack_tracking() { 4486 return DoneInobjectSlackTracking::decode(bit_field3()); 4487} 4488 4489 4490void Map::set_construction_count(int value) { 4491 set_bit_field3(ConstructionCount::update(bit_field3(), value)); 4492} 4493 4494 4495int Map::construction_count() { 4496 return ConstructionCount::decode(bit_field3()); 4497} 4498 4499 4500void Map::freeze() { 4501 set_bit_field3(IsFrozen::update(bit_field3(), true)); 4502} 4503 4504 4505bool Map::is_frozen() { 4506 return IsFrozen::decode(bit_field3()); 4507} 4508 4509 4510void Map::mark_unstable() { 4511 set_bit_field3(IsUnstable::update(bit_field3(), true)); 4512} 4513 4514 4515bool Map::is_stable() { 4516 return !IsUnstable::decode(bit_field3()); 4517} 4518 4519 4520bool Map::has_code_cache() { 4521 return code_cache() != GetIsolate()->heap()->empty_fixed_array(); 4522} 4523 4524 4525bool Map::CanBeDeprecated() { 4526 int descriptor = LastAdded(); 4527 for (int i = 0; i <= descriptor; i++) { 4528 PropertyDetails details = instance_descriptors()->GetDetails(i); 4529 if (details.representation().IsNone()) return true; 4530 if (details.representation().IsSmi()) return true; 4531 if (details.representation().IsDouble()) return true; 4532 if (details.representation().IsHeapObject()) return true; 4533 if (details.type() == CONSTANT) return true; 4534 } 4535 return false; 4536} 4537 4538 4539void Map::NotifyLeafMapLayoutChange() { 4540 if (is_stable()) { 4541 mark_unstable(); 4542 dependent_code()->DeoptimizeDependentCodeGroup( 4543 GetIsolate(), 4544 DependentCode::kPrototypeCheckGroup); 4545 } 4546} 4547 4548 4549bool Map::CanOmitMapChecks() { 4550 return is_stable() && FLAG_omit_map_checks_for_leaf_maps; 4551} 4552 4553 4554int DependentCode::number_of_entries(DependencyGroup group) { 4555 if (length() == 0) return 0; 4556 return Smi::cast(get(group))->value(); 4557} 4558 4559 4560void DependentCode::set_number_of_entries(DependencyGroup group, int value) { 4561 set(group, Smi::FromInt(value)); 4562} 4563 4564 4565bool DependentCode::is_code_at(int i) { 4566 return get(kCodesStartIndex + i)->IsCode(); 4567} 4568 4569Code* DependentCode::code_at(int i) { 4570 return Code::cast(get(kCodesStartIndex + i)); 4571} 4572 4573 4574CompilationInfo* DependentCode::compilation_info_at(int i) { 4575 return reinterpret_cast<CompilationInfo*>( 4576 Foreign::cast(get(kCodesStartIndex + i))->foreign_address()); 4577} 4578 4579 4580void DependentCode::set_object_at(int i, Object* object) { 4581 set(kCodesStartIndex + i, object); 4582} 4583 4584 4585Object* DependentCode::object_at(int i) { 4586 return get(kCodesStartIndex + i); 4587} 4588 4589 4590Object** DependentCode::slot_at(int i) { 4591 return RawFieldOfElementAt(kCodesStartIndex + i); 4592} 4593 4594 4595void DependentCode::clear_at(int i) { 4596 set_undefined(kCodesStartIndex + i); 4597} 4598 4599 4600void DependentCode::copy(int from, int to) { 4601 set(kCodesStartIndex + to, get(kCodesStartIndex + from)); 4602} 4603 4604 4605void DependentCode::ExtendGroup(DependencyGroup group) { 4606 GroupStartIndexes starts(this); 4607 for (int g = kGroupCount - 1; g > group; g--) { 4608 if (starts.at(g) < starts.at(g + 1)) { 4609 copy(starts.at(g), starts.at(g + 1)); 4610 } 4611 } 4612} 4613 4614 4615void Code::set_flags(Code::Flags flags) { 4616 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1); 4617 WRITE_INT_FIELD(this, kFlagsOffset, flags); 4618} 4619 4620 4621Code::Kind Code::kind() { 4622 return ExtractKindFromFlags(flags()); 4623} 4624 4625 4626bool Code::IsCodeStubOrIC() { 4627 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC || 4628 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC || 4629 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC || 4630 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC || 4631 kind() == TO_BOOLEAN_IC; 4632} 4633 4634 4635InlineCacheState Code::ic_state() { 4636 InlineCacheState result = ExtractICStateFromFlags(flags()); 4637 // Only allow uninitialized or debugger states for non-IC code 4638 // objects. This is used in the debugger to determine whether or not 4639 // a call to code object has been replaced with a debug break call. 4640 DCHECK(is_inline_cache_stub() || 4641 result == UNINITIALIZED || 4642 result == DEBUG_STUB); 4643 return result; 4644} 4645 4646 4647ExtraICState Code::extra_ic_state() { 4648 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB); 4649 return ExtractExtraICStateFromFlags(flags()); 4650} 4651 4652 4653Code::StubType Code::type() { 4654 return ExtractTypeFromFlags(flags()); 4655} 4656 4657 4658// For initialization. 4659void Code::set_raw_kind_specific_flags1(int value) { 4660 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value); 4661} 4662 4663 4664void Code::set_raw_kind_specific_flags2(int value) { 4665 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value); 4666} 4667 4668 4669inline bool Code::is_crankshafted() { 4670 return IsCrankshaftedField::decode( 4671 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); 4672} 4673 4674 4675inline bool Code::is_hydrogen_stub() { 4676 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION; 4677} 4678 4679 4680inline void Code::set_is_crankshafted(bool value) { 4681 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); 4682 int updated = IsCrankshaftedField::update(previous, value); 4683 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); 4684} 4685 4686 4687inline bool Code::is_turbofanned() { 4688 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB); 4689 return IsTurbofannedField::decode( 4690 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); 4691} 4692 4693 4694inline void Code::set_is_turbofanned(bool value) { 4695 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB); 4696 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); 4697 int updated = IsTurbofannedField::update(previous, value); 4698 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); 4699} 4700 4701 4702bool Code::optimizable() { 4703 DCHECK_EQ(FUNCTION, kind()); 4704 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1; 4705} 4706 4707 4708void Code::set_optimizable(bool value) { 4709 DCHECK_EQ(FUNCTION, kind()); 4710 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0); 4711} 4712 4713 4714bool Code::has_deoptimization_support() { 4715 DCHECK_EQ(FUNCTION, kind()); 4716 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); 4717 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags); 4718} 4719 4720 4721void Code::set_has_deoptimization_support(bool value) { 4722 DCHECK_EQ(FUNCTION, kind()); 4723 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); 4724 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value); 4725 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); 4726} 4727 4728 4729bool Code::has_debug_break_slots() { 4730 DCHECK_EQ(FUNCTION, kind()); 4731 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); 4732 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags); 4733} 4734 4735 4736void Code::set_has_debug_break_slots(bool value) { 4737 DCHECK_EQ(FUNCTION, kind()); 4738 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); 4739 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value); 4740 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); 4741} 4742 4743 4744bool Code::is_compiled_optimizable() { 4745 DCHECK_EQ(FUNCTION, kind()); 4746 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); 4747 return FullCodeFlagsIsCompiledOptimizable::decode(flags); 4748} 4749 4750 4751void Code::set_compiled_optimizable(bool value) { 4752 DCHECK_EQ(FUNCTION, kind()); 4753 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); 4754 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value); 4755 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); 4756} 4757 4758 4759int Code::allow_osr_at_loop_nesting_level() { 4760 DCHECK_EQ(FUNCTION, kind()); 4761 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); 4762 return AllowOSRAtLoopNestingLevelField::decode(fields); 4763} 4764 4765 4766void Code::set_allow_osr_at_loop_nesting_level(int level) { 4767 DCHECK_EQ(FUNCTION, kind()); 4768 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker); 4769 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); 4770 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level); 4771 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); 4772} 4773 4774 4775int Code::profiler_ticks() { 4776 DCHECK_EQ(FUNCTION, kind()); 4777 return READ_BYTE_FIELD(this, kProfilerTicksOffset); 4778} 4779 4780 4781void Code::set_profiler_ticks(int ticks) { 4782 DCHECK(ticks < 256); 4783 if (kind() == FUNCTION) { 4784 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks); 4785 } 4786} 4787 4788 4789int Code::builtin_index() { 4790 DCHECK_EQ(BUILTIN, kind()); 4791 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset); 4792} 4793 4794 4795void Code::set_builtin_index(int index) { 4796 DCHECK_EQ(BUILTIN, kind()); 4797 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index); 4798} 4799 4800 4801unsigned Code::stack_slots() { 4802 DCHECK(is_crankshafted()); 4803 return StackSlotsField::decode( 4804 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); 4805} 4806 4807 4808void Code::set_stack_slots(unsigned slots) { 4809 CHECK(slots <= (1 << kStackSlotsBitCount)); 4810 DCHECK(is_crankshafted()); 4811 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); 4812 int updated = StackSlotsField::update(previous, slots); 4813 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); 4814} 4815 4816 4817unsigned Code::safepoint_table_offset() { 4818 DCHECK(is_crankshafted()); 4819 return SafepointTableOffsetField::decode( 4820 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); 4821} 4822 4823 4824void Code::set_safepoint_table_offset(unsigned offset) { 4825 CHECK(offset <= (1 << kSafepointTableOffsetBitCount)); 4826 DCHECK(is_crankshafted()); 4827 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize))); 4828 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); 4829 int updated = SafepointTableOffsetField::update(previous, offset); 4830 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); 4831} 4832 4833 4834unsigned Code::back_edge_table_offset() { 4835 DCHECK_EQ(FUNCTION, kind()); 4836 return BackEdgeTableOffsetField::decode( 4837 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2; 4838} 4839 4840 4841void Code::set_back_edge_table_offset(unsigned offset) { 4842 DCHECK_EQ(FUNCTION, kind()); 4843 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize))); 4844 offset = offset >> kPointerSizeLog2; 4845 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); 4846 int updated = BackEdgeTableOffsetField::update(previous, offset); 4847 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); 4848} 4849 4850 4851bool Code::back_edges_patched_for_osr() { 4852 DCHECK_EQ(FUNCTION, kind()); 4853 return allow_osr_at_loop_nesting_level() > 0; 4854} 4855 4856 4857byte Code::to_boolean_state() { 4858 return extra_ic_state(); 4859} 4860 4861 4862bool Code::has_function_cache() { 4863 DCHECK(kind() == STUB); 4864 return HasFunctionCacheField::decode( 4865 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); 4866} 4867 4868 4869void Code::set_has_function_cache(bool flag) { 4870 DCHECK(kind() == STUB); 4871 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); 4872 int updated = HasFunctionCacheField::update(previous, flag); 4873 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); 4874} 4875 4876 4877bool Code::marked_for_deoptimization() { 4878 DCHECK(kind() == OPTIMIZED_FUNCTION); 4879 return MarkedForDeoptimizationField::decode( 4880 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); 4881} 4882 4883 4884void Code::set_marked_for_deoptimization(bool flag) { 4885 DCHECK(kind() == OPTIMIZED_FUNCTION); 4886 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate())); 4887 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); 4888 int updated = MarkedForDeoptimizationField::update(previous, flag); 4889 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); 4890} 4891 4892 4893bool Code::is_weak_stub() { 4894 return CanBeWeakStub() && WeakStubField::decode( 4895 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); 4896} 4897 4898 4899void Code::mark_as_weak_stub() { 4900 DCHECK(CanBeWeakStub()); 4901 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); 4902 int updated = WeakStubField::update(previous, true); 4903 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); 4904} 4905 4906 4907bool Code::is_invalidated_weak_stub() { 4908 return is_weak_stub() && InvalidatedWeakStubField::decode( 4909 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); 4910} 4911 4912 4913void Code::mark_as_invalidated_weak_stub() { 4914 DCHECK(is_inline_cache_stub()); 4915 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); 4916 int updated = InvalidatedWeakStubField::update(previous, true); 4917 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); 4918} 4919 4920 4921bool Code::is_inline_cache_stub() { 4922 Kind kind = this->kind(); 4923 switch (kind) { 4924#define CASE(name) case name: return true; 4925 IC_KIND_LIST(CASE) 4926#undef CASE 4927 default: return false; 4928 } 4929} 4930 4931 4932bool Code::is_keyed_stub() { 4933 return is_keyed_load_stub() || is_keyed_store_stub(); 4934} 4935 4936 4937bool Code::is_debug_stub() { 4938 return ic_state() == DEBUG_STUB; 4939} 4940 4941 4942ConstantPoolArray* Code::constant_pool() { 4943 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset)); 4944} 4945 4946 4947void Code::set_constant_pool(Object* value) { 4948 DCHECK(value->IsConstantPoolArray()); 4949 WRITE_FIELD(this, kConstantPoolOffset, value); 4950 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value); 4951} 4952 4953 4954Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state, 4955 ExtraICState extra_ic_state, StubType type, 4956 CacheHolderFlag holder) { 4957 // Compute the bit mask. 4958 unsigned int bits = KindField::encode(kind) 4959 | ICStateField::encode(ic_state) 4960 | TypeField::encode(type) 4961 | ExtraICStateField::encode(extra_ic_state) 4962 | CacheHolderField::encode(holder); 4963 return static_cast<Flags>(bits); 4964} 4965 4966 4967Code::Flags Code::ComputeMonomorphicFlags(Kind kind, 4968 ExtraICState extra_ic_state, 4969 CacheHolderFlag holder, 4970 StubType type) { 4971 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder); 4972} 4973 4974 4975Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type, 4976 CacheHolderFlag holder) { 4977 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder); 4978} 4979 4980 4981Code::Kind Code::ExtractKindFromFlags(Flags flags) { 4982 return KindField::decode(flags); 4983} 4984 4985 4986InlineCacheState Code::ExtractICStateFromFlags(Flags flags) { 4987 return ICStateField::decode(flags); 4988} 4989 4990 4991ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) { 4992 return ExtraICStateField::decode(flags); 4993} 4994 4995 4996Code::StubType Code::ExtractTypeFromFlags(Flags flags) { 4997 return TypeField::decode(flags); 4998} 4999 5000 5001CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) { 5002 return CacheHolderField::decode(flags); 5003} 5004 5005 5006Code::Flags Code::RemoveTypeFromFlags(Flags flags) { 5007 int bits = flags & ~TypeField::kMask; 5008 return static_cast<Flags>(bits); 5009} 5010 5011 5012Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) { 5013 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask; 5014 return static_cast<Flags>(bits); 5015} 5016 5017 5018Code* Code::GetCodeFromTargetAddress(Address address) { 5019 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); 5020 // GetCodeFromTargetAddress might be called when marking objects during mark 5021 // sweep. reinterpret_cast is therefore used instead of the more appropriate 5022 // Code::cast. Code::cast does not work when the object's map is 5023 // marked. 5024 Code* result = reinterpret_cast<Code*>(code); 5025 return result; 5026} 5027 5028 5029Object* Code::GetObjectFromEntryAddress(Address location_of_address) { 5030 return HeapObject:: 5031 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); 5032} 5033 5034 5035bool Code::IsWeakObjectInOptimizedCode(Object* object) { 5036 if (!FLAG_collect_maps) return false; 5037 if (object->IsMap()) { 5038 return Map::cast(object)->CanTransition() && 5039 FLAG_weak_embedded_maps_in_optimized_code; 5040 } 5041 if (object->IsJSObject() || 5042 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) { 5043 return FLAG_weak_embedded_objects_in_optimized_code; 5044 } 5045 return false; 5046} 5047 5048 5049class Code::FindAndReplacePattern { 5050 public: 5051 FindAndReplacePattern() : count_(0) { } 5052 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) { 5053 DCHECK(count_ < kMaxCount); 5054 find_[count_] = map_to_find; 5055 replace_[count_] = obj_to_replace; 5056 ++count_; 5057 } 5058 private: 5059 static const int kMaxCount = 4; 5060 int count_; 5061 Handle<Map> find_[kMaxCount]; 5062 Handle<Object> replace_[kMaxCount]; 5063 friend class Code; 5064}; 5065 5066 5067bool Code::IsWeakObjectInIC(Object* object) { 5068 return object->IsMap() && Map::cast(object)->CanTransition() && 5069 FLAG_collect_maps && 5070 FLAG_weak_embedded_maps_in_ic; 5071} 5072 5073 5074Object* Map::prototype() const { 5075 return READ_FIELD(this, kPrototypeOffset); 5076} 5077 5078 5079void Map::set_prototype(Object* value, WriteBarrierMode mode) { 5080 DCHECK(value->IsNull() || value->IsJSReceiver()); 5081 WRITE_FIELD(this, kPrototypeOffset, value); 5082 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode); 5083} 5084 5085 5086// If the descriptor is using the empty transition array, install a new empty 5087// transition array that will have place for an element transition. 5088static void EnsureHasTransitionArray(Handle<Map> map) { 5089 Handle<TransitionArray> transitions; 5090 if (!map->HasTransitionArray()) { 5091 transitions = TransitionArray::Allocate(map->GetIsolate(), 0); 5092 transitions->set_back_pointer_storage(map->GetBackPointer()); 5093 } else if (!map->transitions()->IsFullTransitionArray()) { 5094 transitions = TransitionArray::ExtendToFullTransitionArray(map); 5095 } else { 5096 return; 5097 } 5098 map->set_transitions(*transitions); 5099} 5100 5101 5102void Map::InitializeDescriptors(DescriptorArray* descriptors) { 5103 int len = descriptors->number_of_descriptors(); 5104 set_instance_descriptors(descriptors); 5105 SetNumberOfOwnDescriptors(len); 5106} 5107 5108 5109ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) 5110 5111 5112void Map::set_bit_field3(uint32_t bits) { 5113 if (kInt32Size != kPointerSize) { 5114 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0); 5115 } 5116 WRITE_UINT32_FIELD(this, kBitField3Offset, bits); 5117} 5118 5119 5120uint32_t Map::bit_field3() { 5121 return READ_UINT32_FIELD(this, kBitField3Offset); 5122} 5123 5124 5125void Map::AppendDescriptor(Descriptor* desc) { 5126 DescriptorArray* descriptors = instance_descriptors(); 5127 int number_of_own_descriptors = NumberOfOwnDescriptors(); 5128 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); 5129 descriptors->Append(desc); 5130 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1); 5131} 5132 5133 5134Object* Map::GetBackPointer() { 5135 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); 5136 if (object->IsDescriptorArray()) { 5137 return TransitionArray::cast(object)->back_pointer_storage(); 5138 } else { 5139 DCHECK(object->IsMap() || object->IsUndefined()); 5140 return object; 5141 } 5142} 5143 5144 5145bool Map::HasElementsTransition() { 5146 return HasTransitionArray() && transitions()->HasElementsTransition(); 5147} 5148 5149 5150bool Map::HasTransitionArray() const { 5151 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); 5152 return object->IsTransitionArray(); 5153} 5154 5155 5156Map* Map::elements_transition_map() { 5157 int index = transitions()->Search(GetHeap()->elements_transition_symbol()); 5158 return transitions()->GetTarget(index); 5159} 5160 5161 5162bool Map::CanHaveMoreTransitions() { 5163 if (!HasTransitionArray()) return true; 5164 return FixedArray::SizeFor(transitions()->length() + 5165 TransitionArray::kTransitionSize) 5166 <= Page::kMaxRegularHeapObjectSize; 5167} 5168 5169 5170Map* Map::GetTransition(int transition_index) { 5171 return transitions()->GetTarget(transition_index); 5172} 5173 5174 5175int Map::SearchTransition(Name* name) { 5176 if (HasTransitionArray()) return transitions()->Search(name); 5177 return TransitionArray::kNotFound; 5178} 5179 5180 5181FixedArray* Map::GetPrototypeTransitions() { 5182 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array(); 5183 if (!transitions()->HasPrototypeTransitions()) { 5184 return GetHeap()->empty_fixed_array(); 5185 } 5186 return transitions()->GetPrototypeTransitions(); 5187} 5188 5189 5190void Map::SetPrototypeTransitions( 5191 Handle<