1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4//
5// Review notes:
6//
7// - The use of macros in these inline functions may seem superfluous
8// but it is absolutely needed to make sure gcc generates optimal
9// code. gcc is not happy when attempting to inline too deep.
10//
11
12#ifndef V8_OBJECTS_INL_H_
13#define V8_OBJECTS_INL_H_
14
15#include "src/base/atomicops.h"
16#include "src/elements.h"
17#include "src/objects.h"
18#include "src/contexts.h"
19#include "src/conversions-inl.h"
20#include "src/field-index-inl.h"
21#include "src/heap.h"
22#include "src/isolate.h"
23#include "src/heap-inl.h"
24#include "src/property.h"
25#include "src/spaces.h"
26#include "src/store-buffer.h"
27#include "src/v8memory.h"
28#include "src/factory.h"
29#include "src/incremental-marking.h"
30#include "src/transitions-inl.h"
31#include "src/objects-visiting.h"
32#include "src/lookup.h"
33
34namespace v8 {
35namespace internal {
36
37PropertyDetails::PropertyDetails(Smi* smi) {
38  value_ = smi->value();
39}
40
41
// Encodes the details as a Smi.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  // (Shift the top bit out, then arithmetic-shift it back in so bit 30 is
  // duplicated into bit 31.)
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
48
49
50PropertyDetails PropertyDetails::AsDeleted() const {
51  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
52  return PropertyDetails(smi);
53}
54
55
// Defines Object::Is##type() as a single instance-type comparison on the
// receiver's map.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }


// Defines type::cast() as a reinterpret_cast checked only in slow-assert
// builds.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }


// Raw int field accessors (untagged data; no write barrier needed).
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Tagged-pointer field accessors; the setter emits a write barrier unless
// the caller's mode suppresses it.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }
81
82
// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// Smis are immediates, so no write barrier is emitted; `mode` is accepted
// only for signature compatibility with ACCESSORS and is ignored.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }

// Like SMI_ACCESSORS, but with acquire-load / release-store semantics for
// fields accessed concurrently from other threads.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() {                       \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

// Like SMI_ACCESSORS, but with relaxed (no-barrier) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)          \
  int holder::nobarrier_##name() {                             \
    Object* value = NOBARRIER_READ_FIELD(this, offset);        \
    return Smi::cast(value)->value();                          \
  }                                                            \
  void holder::nobarrier_set_##name(int value) {               \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value));  \
  }

// Read-only accessor for one bit of an int-valued field.
// NOTE(review): the line continuation after the closing brace below is
// stray but harmless -- it merely absorbs the following blank line.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


// Read/write accessors for one bit of an int-valued field.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
132
133
134bool Object::IsFixedArrayBase() {
135  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
136         IsFixedTypedArrayBase() || IsExternalArray();
137}
138
139
// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}


// AccessorInfo covers both accessor-info struct kinds.
bool Object::IsAccessorInfo() {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}


// Smis are recognized purely from the pointer's tag bits.
bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
165
166
// String types occupy the low end of the instance-type enum, so one
// comparison suffices.
bool Object::IsString() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


// Names are the valid property keys: strings and symbols.
bool Object::IsName() {
  return IsString() || IsSymbol();
}


// Unique names can be compared by identity: internalized strings and
// symbols.
bool Object::IsUniqueName() {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


// Callable spec objects: functions and function proxies.
bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // "Is a string" and "is internalized" are both encoded in the instance
  // type, so a single mask-and-compare tests them together.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
208
209
// The predicates below first check that the receiver is a String at all,
// then test the representation/encoding bits captured by StringShape.
bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


// "Ascii" in these names means the one-byte (Latin-1) encoding.
bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}
260
261
262bool Object::HasValidElements() {
263  // Dictionary is covered under FixedArray.
264  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
265         IsFixedTypedArrayBase();
266}
267
268
269Handle<Object> Object::NewStorageFor(Isolate* isolate,
270                                     Handle<Object> object,
271                                     Representation representation) {
272  if (representation.IsSmi() && object->IsUninitialized()) {
273    return handle(Smi::FromInt(0), isolate);
274  }
275  if (!representation.IsDouble()) return object;
276  if (object->IsUninitialized()) {
277    return isolate->factory()->NewHeapNumber(0);
278  }
279  return isolate->factory()->NewHeapNumber(object->Number());
280}
281
282
// StringShape snapshots a string's instance type so repeated shape tests
// avoid re-loading the map.  Every constructor asserts the type really is
// a string type.
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  ASSERT(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
310
311
// Encoding of this string node itself.  For an indirect string (cons or
// slice) this can differ from the encoding of the character data; use the
// *Underneath variants for that.
bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


// True when the string is known to contain only one-byte characters:
// either it is one-byte encoded or the one-byte data hint is set.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
361
362
// Shape predicates over the cached instance type; the mask/tag constants
// encode the representation (seq/cons/sliced/external) and the encoding
// (one-byte/two-byte).
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect strings (cons and sliced) point at other strings for their
// character data.
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding combined in one value.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
402
403
// These asserts tie the internal string-type encoding to the constants
// exported to embedders through v8.h (Internals).
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);


// "Ascii" in these names means the one-byte (Latin-1) encoding.
bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
             Internals::kExternalAsciiRepresentationTag);

STATIC_ASSERT(v8::String::ASCII_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
441
// Returns the character at `index`, reading the flat character data with
// the width recorded when the reader was refreshed.
uc32 FlatStringReader::Get(int index) {
  // NOTE(review): the upper bound is `<= length_`, i.e. one past the last
  // character is allowed -- presumably intentional (lookahead); confirm
  // before tightening to `<`.
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
450
451
// The *Shape::AsHandle functions exist to satisfy the hash-table shape
// interface; each simply defers to the key itself.
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
472
473template <typename Char>
474class SequentialStringKey : public HashTableKey {
475 public:
476  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
477      : string_(string), hash_field_(0), seed_(seed) { }
478
479  virtual uint32_t Hash() V8_OVERRIDE {
480    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
481                                                           string_.length(),
482                                                           seed_);
483
484    uint32_t result = hash_field_ >> String::kHashShift;
485    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
486    return result;
487  }
488
489
490  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
491    return String::cast(other)->Hash();
492  }
493
494  Vector<const Char> string_;
495  uint32_t hash_field_;
496  uint32_t seed_;
497};
498
499
500class OneByteStringKey : public SequentialStringKey<uint8_t> {
501 public:
502  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
503      : SequentialStringKey<uint8_t>(str, seed) { }
504
505  virtual bool IsMatch(Object* string) V8_OVERRIDE {
506    return String::cast(string)->IsOneByteEqualTo(string_);
507  }
508
509  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
510};
511
512
513template<class Char>
514class SubStringKey : public HashTableKey {
515 public:
516  SubStringKey(Handle<String> string, int from, int length)
517      : string_(string), from_(from), length_(length) {
518    if (string_->IsSlicedString()) {
519      string_ = Handle<String>(Unslice(*string_, &from_));
520    }
521    ASSERT(string_->IsSeqString() || string->IsExternalString());
522  }
523
524  virtual uint32_t Hash() V8_OVERRIDE {
525    ASSERT(length_ >= 0);
526    ASSERT(from_ + length_ <= string_->length());
527    const Char* chars = GetChars() + from_;
528    hash_field_ = StringHasher::HashSequentialString(
529        chars, length_, string_->GetHeap()->HashSeed());
530    uint32_t result = hash_field_ >> String::kHashShift;
531    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
532    return result;
533  }
534
535  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
536    return String::cast(other)->Hash();
537  }
538
539  virtual bool IsMatch(Object* string) V8_OVERRIDE;
540  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
541
542 private:
543  const Char* GetChars();
544  String* Unslice(String* string, int* offset) {
545    while (string->IsSlicedString()) {
546      SlicedString* sliced = SlicedString::cast(string);
547      *offset += sliced->offset();
548      string = sliced->parent();
549    }
550    return string;
551  }
552
553  Handle<String> string_;
554  int from_;
555  int length_;
556  uint32_t hash_field_;
557};
558
559
// String-table key over two-byte (UTF-16) character data.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  // True if `string` consists of exactly these two-byte characters.
  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
};
571
572
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  // Computes the hash lazily and caches it.  hash_field_ == 0 means "not
  // yet computed"; a computed hash field is never 0 (see the ASSERT).
  virtual uint32_t Hash() V8_OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  // Materializes an internalized string for this key; forces the hash
  // (and with it chars_) to be computed first.
  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
606
607
bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


// Fillers mark dead heap space: free-space blocks and filler objects.
bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  // External array types occupy a contiguous instance-type range.
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}
632
633
// Expands to Is##Type##Array checkers for every typed-array element kind,
// in both the external and the fixed (on-heap) flavors.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)               \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE)           \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER


bool Object::IsFixedTypedArrayBase() {
  if (!Object::IsHeapObject()) return false;

  // Fixed typed-array types occupy a contiguous instance-type range.
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}
650
651
// JS receiver types form the tail of the instance-type enum, so a single
// lower-bound comparison suffices.
bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  return  HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
684
685
bool Object::IsJSWeakCollection() {
  return IsJSWeakMap() || IsJSWeakSet();
}


// Descriptor and transition arrays have no instance type of their own;
// any FixedArray passes these checks.
bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
734
735
// Contexts are recognized by their map: each context kind has a dedicated
// map owned by the heap.
bool Object::IsContext() {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());
}


bool Object::IsNativeContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}
762
763
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization for the templated Is<T>() dispatch.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
783
// A string wrapper is a JSValue boxing a string (e.g. `new String("x")`).
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


// True/false are the oddball kinds with the not-boolean bit clear.
bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


// Array-buffer views: typed arrays and data views.
bool Object::IsJSArrayBufferView() {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization for the templated Is<T>() dispatch.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
815
816
// Hash tables are recognized by the dedicated hash-table map.
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// Every hash table except the (single) string table counts as a
// dictionary here.
bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsStringTable() {
  return IsHashTable();
}
833
834
// Heuristic check: a function result cache is a fixed array with the
// fixed-size header followed by a whole number of entries.
bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
852
853
bool Object::IsNormalizedMapCache() {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


// Maps a map's hash into one of the cache's fixed number of slots.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


// Heuristic check: a normalized map cache is a fixed array of exactly
// kEntries elements.
bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
876
877
// The following cache kinds are structurally plain hash tables; nothing
// distinguishes them, so these checks are necessarily approximate.
bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsObjectHashTable() {
  return IsHashTable();
}


// Ordered hash tables have their own dedicated map.
bool Object::IsOrderedHashTable() {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


// Primitive values: oddballs (undefined, null, booleans, ...), numbers
// and strings.
bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}
913
914
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  // Global proxy maps always have the access-check bit set.
  ASSERT(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  if (!IsHeapObject()) return false;
  // For a global proxy, an access check is needed only when the proxy is
  // detached from the current global object.
  if (IsJSGlobalProxy()) {
    JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global =
        proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}
954
955
// True if the object is any of the struct types in STRUCT_LIST.
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Generates an Is##Name() predicate for every struct type.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
974
975
// The singleton oddball values are distinguished by their kind() field.
bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
1014
1015
// Numeric value of a Smi or HeapNumber receiver.
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  // Smis can never be NaN, so only HeapNumbers need the check.
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}
1027
1028
// Converts `object` to a Smi handle if it is a Smi, or a HeapNumber whose
// value round-trips through int and fits in Smi range; otherwise returns
// an empty handle.
MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    // NOTE(review): -0.0 compares equal to 0.0 here, so a -0 HeapNumber
    // converts to Smi 0 -- confirm callers don't rely on preserving -0.
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
    }
  }
  return Handle<Smi>();
}
1040
1041
// Convenience overload: converts using the current context's native
// context.
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


// True if this is a JSObject whose [[Class]] name is identical to `name`.
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);
}
1059
1060
1061MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1062                                       Handle<Object> object,
1063                                       uint32_t index) {
1064  // GetElement can trigger a getter which can cause allocation.
1065  // This was not always the case. This ASSERT is here to catch
1066  // leftover incorrect uses.
1067  ASSERT(AllowHeapAllocation::IsAllowed());
1068  return Object::GetElementWithReceiver(isolate, object, object, index);
1069}
1070
1071
1072MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1073                                                 Handle<Name> name) {
1074  uint32_t index;
1075  Isolate* isolate = name->GetIsolate();
1076  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1077  return GetProperty(object, name);
1078}
1079
1080
1081MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1082                                        Handle<Object> object,
1083                                        const char* name) {
1084  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1085  ASSERT(!str.is_null());
1086#ifdef DEBUG
1087  uint32_t index;  // Assert that the name is not an array index.
1088  ASSERT(!str->AsArrayIndex(&index));
1089#endif  // DEBUG
1090  return GetProperty(object, str);
1091}
1092
1093
// Indexed proxy operations: each converts the element index to a string and
// defers to the corresponding named-property handler call.
MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
}


MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   uint32_t index,
                                                   Handle<Object> value,
                                                   StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  // Elements stored through a proxy carry no property attributes.
  return SetPropertyWithHandler(
      proxy, receiver, name, value, NONE, strict_mode);
}


bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}
1119
1120
// Raw field access helpers. |offset| is a byte offset within the object;
// FIELD_ADDR compensates for the heap-object tag carried in the pointer.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Plain (non-atomic) tagged-pointer read.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

// Acquire-ordered read; pairs with RELEASE_WRITE_FIELD.
#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset))))

// Relaxed atomic read: no ordering, but tear-free.
#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset))))

// Plain tagged-pointer write; callers are responsible for write barriers.
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Release-ordered write; pairs with ACQUIRE_READ_FIELD.
#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// Notifies the incremental marker of the store and records old-to-new
// pointers in the store buffer.
// NOTE(review): this expands to multiple statements, and the conditional
// variant to a bare if — neither is safe as the unbraced body of an
// if/else at the use site.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
      object, HeapObject::RawField(object, offset), value);             \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


// Typed scalar field accessors; all are plain (non-atomic) accesses.
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

// Relaxed atomic byte accessors (tear-free, no ordering).
#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1253
// Returns the address of the field at |byte_offset| as an Object** slot.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


// Decodes the integer payload from a tagged Smi pointer.
int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  // Shift the payload past the tag bits and or in the Smi tag.
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  // Cross-check the platform-specific predicate against the range constants.
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
1282
1283
// A MapWord holds either a map pointer or, during GC, a forwarding address.
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// A forwarding address is stored untagged, so it looks like a Smi.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  // Strip the heap-object tag so the stored word is Smi-tagged (see above).
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1309
1310
#ifdef VERIFY_HEAP
// Heap verification helpers, compiled in only for VERIFY_HEAP builds.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


// Recovers the owning Heap from the object's address via its MemoryChunk.
Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  SLOW_ASSERT(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}
1333
1334
// Returns the object's map. In debug builds the PathTracer may have set a
// mark bit in the map word, which is stripped before decoding.
Map* HeapObject::map() {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}
1345
1346
// Installs a new map and notifies the incremental marker.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Acquire-load variant of map(), for concurrent readers.
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


// Release-store variant of set_map(), for concurrent readers.
void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}
1381
1382
// Raw map-word accessors. The relaxed (NOBARRIER) pair is for use when no
// concurrent access is possible; the acquire/release pair synchronizes with
// concurrent readers of the map word.
MapWord HeapObject::map_word() {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
1405
1406
// Converts between untagged addresses and tagged HeapObject pointers.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


// Object size in bytes, derived from the map.
int HeapObject::Size() {
  return SizeFromMap(map());
}


// Visits all pointer slots in the byte range [start, end).
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1437
1438
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Extracts the unbiased IEEE-754 exponent from the high word of the value.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Non-zero iff the sign bit is set (value is negative).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1458
1459
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// Address of element 0 in the array's backing store.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
1466
1467
1468bool FixedArray::ContainsOnlySmisOrHoles() {
1469  Object* the_hole = GetHeap()->the_hole_value();
1470  Object** current = GetFirstElementAddress();
1471  for (int i = 0; i < length(); ++i) {
1472    Object* candidate = *current++;
1473    if (!candidate->IsSmi() && candidate != the_hole) return false;
1474  }
1475  return true;
1476}
1477
1478
// Returns the elements backing store (FixedArray, FixedDoubleArray, ...).
FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


// Runs the ElementsAccessor's consistency checks; a no-op unless slow
// asserts are compiled in and enabled.
void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
  }
#endif
}
1493
1494
// Resets all feedback fields of the site to their pristine state.
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  // The empty fixed array doubles as an empty DependentCode list; it lives
  // in old space so the write barrier can be skipped.
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


// Marks a site as dead: it is reset and excluded from further feedback.
void AllocationSite::MarkZombie() {
  ASSERT(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
1511
1512
1513// Heuristic: We only need to create allocation site info if the boilerplate
1514// elements kind is the initial elements kind.
1515AllocationSiteMode AllocationSite::GetMode(
1516    ElementsKind boilerplate_elements_kind) {
1517  if (FLAG_pretenuring_call_new ||
1518      IsFastSmiElementsKind(boilerplate_elements_kind)) {
1519    return TRACK_ALLOCATION_SITE;
1520  }
1521
1522  return DONT_TRACK_ALLOCATION_SITE;
1523}
1524
1525
1526AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1527                                           ElementsKind to) {
1528  if (FLAG_pretenuring_call_new ||
1529      (IsFastSmiElementsKind(from) &&
1530       IsMoreGeneralElementsKindTransition(from, to))) {
1531    return TRACK_ALLOCATION_SITE;
1532  }
1533
1534  return DONT_TRACK_ALLOCATION_SITE;
1535}
1536
1537
1538inline bool AllocationSite::CanTrack(InstanceType type) {
1539  if (FLAG_allocation_site_pretenuring) {
1540    return type == JS_ARRAY_TYPE ||
1541        type == JS_OBJECT_TYPE ||
1542        type < FIRST_NONSTRING_TYPE;
1543  }
1544  return type == JS_ARRAY_TYPE;
1545}
1546
1547
1548inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1549    Reason reason) {
1550  switch (reason) {
1551    case TENURING:
1552      return DependentCode::kAllocationSiteTenuringChangedGroup;
1553      break;
1554    case TRANSITIONS:
1555      return DependentCode::kAllocationSiteTransitionChangedGroup;
1556      break;
1557  }
1558  UNREACHABLE();
1559  return DependentCode::kAllocationSiteTransitionChangedGroup;
1560}
1561
1562
// Stores the found-count into its bitfield slice of pretenure_data.
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  ASSERT((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  ASSERT(count < MementoFoundCountBits::kMax);
  // Smi payload: no heap pointer involved, so the write barrier is skipped.
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}
1575
1576inline bool AllocationSite::IncrementMementoFoundCount() {
1577  if (IsZombie()) return false;
1578
1579  int value = memento_found_count();
1580  set_memento_found_count(value + 1);
1581  return memento_found_count() == kPretenureMinimumCreated;
1582}
1583
1584
1585inline void AllocationSite::IncrementMementoCreateCount() {
1586  ASSERT(FLAG_allocation_site_pretenuring);
1587  int value = memento_create_count();
1588  set_memento_create_count(value + 1);
1589}
1590
1591
1592inline bool AllocationSite::MakePretenureDecision(
1593    PretenureDecision current_decision,
1594    double ratio,
1595    bool maximum_size_scavenge) {
1596  // Here we just allow state transitions from undecided or maybe tenure
1597  // to don't tenure, maybe tenure, or tenure.
1598  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1599    if (ratio >= kPretenureRatio) {
1600      // We just transition into tenure state when the semi-space was at
1601      // maximum capacity.
1602      if (maximum_size_scavenge) {
1603        set_deopt_dependent_code(true);
1604        set_pretenure_decision(kTenure);
1605        // Currently we just need to deopt when we make a state transition to
1606        // tenure.
1607        return true;
1608      }
1609      set_pretenure_decision(kMaybeTenure);
1610    } else {
1611      set_pretenure_decision(kDontTenure);
1612    }
1613  }
1614  return false;
1615}
1616
1617
// Consumes the per-GC memento counters, possibly updating the pretenuring
// decision. Returns whether dependent code must be deoptimized.
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  // Ratio is only meaningful (and create_count only non-zero for sure) once
  // enough mementos exist; it is also computed for tracing output.
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
         static_cast<void*>(this), create_count, found_count, ratio,
         PretenureDecisionName(current_decision),
         PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
1647
1648
1649void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1650  JSObject::ValidateElements(object);
1651  ElementsKind elements_kind = object->map()->elements_kind();
1652  if (!IsFastObjectElementsKind(elements_kind)) {
1653    if (IsFastHoleyElementsKind(elements_kind)) {
1654      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1655    } else {
1656      TransitionElementsKind(object, FAST_ELEMENTS);
1657    }
1658  }
1659}
1660
1661
// Computes the most specific elements kind able to hold |objects| (walking
// the smi -> double -> object lattice, tracking holeyness) and transitions
// to it if needed.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    // |objects| are raw pointers; no allocation is allowed while scanning.
    DisallowHeapAllocation no_allocation;
    ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    // FAST_HOLEY_ELEMENTS is already the most general fast kind.
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          // Numbers may be stored unboxed if we are still in a smi kind.
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          // Holey object elements is maximal: stop scanning early.
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
1702
1703
// Overload taking a whole backing store. Plain FixedArrays are delegated to
// the element-wise overload; FixedDoubleArrays only require a transition out
// of the smi kinds, choosing the holey variant when a hole is present.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    ASSERT(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    // Copied-double mode does not apply to a plain FixedArray.
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    // Any hole forces the holey double kind.
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
1736
1737
// Atomically (from the VM's perspective) installs a new map together with a
// matching elements backing store, asserting that the two agree.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  ASSERT((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  ASSERT((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


// Write barrier is unnecessary: the empty fixed array is never in new space
// (asserted below).
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
1769
1770
// If |map| has exactly one transition, and it adds a plain (FIELD, NONE)
// property keyed by a string, returns that string; otherwise a null handle.
Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}


// Target map of the simple transition; only valid when ExpectedTransitionKey
// returned a non-null handle (asserted).
Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
  ASSERT(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}


// Searches |map|'s transitions for one keyed by |key| that adds a plain
// (FIELD, NONE) property; returns its target map or a null handle.
Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}
1804
1805
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// The kind tag is stored as a Smi field.
byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


// Smi store: no write barrier required.
void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

// Raw (unwrapped) accessors for the cell's type field.
Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}
1841
1842
// Size in bytes of the fixed header for this object's instance type; the
// internal fields (if any) start immediately after this header.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}
1899
1900
// Number of embedder-visible internal fields on this object.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


// Byte offset of internal field |index| from the object start.
int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Smi overload: the stored value is not a heap pointer, so no barrier.
void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
1944
1945
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  // In-object properties live inside the object itself; the rest live in the
  // separate properties backing store.
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}


// Stores a fast-case property. The in-object case emits its own write
// barrier; the out-of-object case relies on FixedArray::set's barrier.
void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}


// Byte offset of in-object property |index|; the map knows the layout.
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


// Reads the in-object property at |index|.
Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


// Writes the in-object property at |index| and returns |value|; the write
// barrier is emitted only as demanded by |mode|.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
1989
1990
1991
// Initializes the body of a freshly allocated JSObject: the map's
// pre-allocated property fields get |pre_allocated_value| and the remaining
// slots get |filler_value|. Both values must not live in new space because
// the stores below deliberately skip the write barrier.
void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    // Only walk the pre-allocated fields separately when the two fill values
    // actually differ; otherwise the loop below covers everything.
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2014
2015
// An object has fast properties iff its backing store is not a dictionary;
// this must agree with the map's is_dictionary_map flag.
bool JSObject::HasFastProperties() {
  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}
2020
2021
2022bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
2023  // Allow extra fast properties if the object has more than
2024  // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
2025  // very unlikely that the object is being used as a dictionary and there is a
2026  // good chance that allowing more map transitions will be worth it.
2027  Map* map = this->map();
2028  if (map->unused_property_fields() != 0) return false;
2029
2030  int inobject = map->inobject_properties();
2031
2032  int limit;
2033  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
2034    limit = Max(inobject, kMaxFastProperties);
2035  } else {
2036    limit = Max(inobject, kFastPropertiesSoftLimit);
2037  }
2038  return properties()->length() > limit;
2039}
2040
2041
// Fills the body of a Struct with the undefined value.
// NOTE(review): no write barrier is emitted here — assumes the undefined
// value is not in new space (FixedArray::set_undefined asserts exactly that).
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
2048
2049
2050bool Object::ToArrayIndex(uint32_t* index) {
2051  if (IsSmi()) {
2052    int value = Smi::cast(this)->value();
2053    if (value < 0) return false;
2054    *index = value;
2055    return true;
2056  }
2057  if (IsHeapNumber()) {
2058    double value = HeapNumber::cast(this)->value();
2059    uint32_t uint_value = static_cast<uint32_t>(value);
2060    if (value == static_cast<double>(uint_value)) {
2061      *index = uint_value;
2062      return true;
2063    }
2064  }
2065  return false;
2066}
2067
2068
2069bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2070  if (!this->IsJSValue()) return false;
2071
2072  JSValue* js_value = JSValue::cast(this);
2073  if (!js_value->value()->IsString()) return false;
2074
2075  String* str = String::cast(js_value->value());
2076  if (index >= static_cast<uint32_t>(str->length())) return false;
2077
2078  return true;
2079}
2080
2081
// Sanity check for values returned from API callbacks: only a small set of
// object kinds is legal. Compiles to nothing unless ENABLE_EXTRA_CHECKS.
void Object::VerifyApiCallResultType() {
#if ENABLE_EXTRA_CHECKS
  if (!(IsSmi() ||
        IsString() ||
        IsSymbol() ||
        IsSpecObject() ||
        IsHeapNumber() ||
        IsUndefined() ||
        IsTrue() ||
        IsFalse() ||
        IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // ENABLE_EXTRA_CHECKS
}
2097
2098
// Checked down-cast to FixedArrayBase.
FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArrayBase());
  return reinterpret_cast<FixedArrayBase*>(object);
}


// Reads element |index|; elements are pointer-sized slots after the header.
Object* FixedArray::get(int index) {
  SLOW_ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


// Handlified version of get().
Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}


// True if element |index| holds the hole sentinel.
bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


// Smi store: skips the write barrier because Smis are not heap-allocated.
// Copy-on-write arrays must never be mutated in place.
void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// General store with an unconditional write barrier.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2137
2138
// The hole in a FixedDoubleArray is one specific NaN bit pattern
// (kHoleNanInt64); compare bit patterns, not doubles, since NaN != NaN.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


// The hole bit pattern reinterpreted as a double, ready for storing.
inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


// A NaN guaranteed not to collide with the hole encoding; used to
// canonicalize NaNs before storing them.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}


// Reads element |index| as a double; callers must have excluded holes.
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}

// Reads the raw 64-bit representation of element |index|; safe on holes.
int64_t FixedDoubleArray::get_representation(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}
2171
2172
2173Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2174                                     int index) {
2175  if (array->is_the_hole(index)) {
2176    return array->GetIsolate()->factory()->the_hole_value();
2177  } else {
2178    return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2179  }
2180}
2181
2182
// Stores |value| at |index|, canonicalizing NaNs so no ordinary value can
// alias the hole encoding. Doubles need no write barrier.
void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


// Marks element |index| as a hole by storing the hole NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


// True if element |index| holds the hole NaN bit pattern.
bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}


// Address of the first element, for bulk access.
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


// Fills the half-open range [from, to) with holes.
void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
2216
2217
// True if this constant pool uses the extended (two-section) layout; the
// flag is packed into the first small layout word.
bool ConstantPoolArray::is_extended_layout() {
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  return IsExtendedField::decode(small_layout_1);
}


// The last section present in this pool.
ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
  return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
}


// Index of the first extended-section entry, i.e. the total entry count of
// the small section (cached in the second layout word).
int ConstantPoolArray::first_extended_section_index() {
  ASSERT(is_extended_layout());
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return TotalCountField::decode(small_layout_2);
}


// Byte offset of the extended section header: the small section's size,
// rounded up to 64-bit alignment.
int ConstantPoolArray::get_extended_section_header_offset() {
  return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
}


// Reads the weak-object-state bits from the second small layout word.
ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return WeakObjectStateField::decode(small_layout_2);
}
2245
2246
2247void ConstantPoolArray::set_weak_object_state(
2248      ConstantPoolArray::WeakObjectState state) {
2249  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2250  small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2251  WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2252}
2253
2254
// Index of the first entry of |type| within |section|: the section's base
// index plus the entry counts of all preceding types. Entries are grouped by
// type in declaration order within each section.
int ConstantPoolArray::first_index(Type type, LayoutSection section) {
  int index = 0;
  if (section == EXTENDED_SECTION) {
    ASSERT(is_extended_layout());
    index += first_extended_section_index();
  }

  for (Type type_iter = FIRST_TYPE; type_iter < type;
       type_iter = next_type(type_iter)) {
    index += number_of_entries(type_iter, section);
  }

  return index;
}


// Index of the last entry of |type| within |section|. Note this is
// first_index - 1 when the type has no entries in the section.
int ConstantPoolArray::last_index(Type type, LayoutSection section) {
  return first_index(type, section) + number_of_entries(type, section) - 1;
}
2274
2275
// Number of entries of |type| in |section|. Small-section counts are packed
// into the two small layout bit-field words; extended-section counts are
// stored as plain ints in the extended section header.
int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
  if (section == SMALL_SECTION) {
    uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
    uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
    switch (type) {
      case INT64:
        return Int64CountField::decode(small_layout_1);
      case CODE_PTR:
        return CodePtrCountField::decode(small_layout_1);
      case HEAP_PTR:
        return HeapPtrCountField::decode(small_layout_1);
      case INT32:
        return Int32CountField::decode(small_layout_2);
      default:
        UNREACHABLE();
        return 0;
    }
  } else {
    ASSERT(section == EXTENDED_SECTION && is_extended_layout());
    // Locate the per-type count slot within the extended section header.
    int offset = get_extended_section_header_offset();
    switch (type) {
      case INT64:
        offset += kExtendedInt64CountOffset;
        break;
      case CODE_PTR:
        offset += kExtendedCodePtrCountOffset;
        break;
      case HEAP_PTR:
        offset += kExtendedHeapPtrCountOffset;
        break;
      case INT32:
        offset += kExtendedInt32CountOffset;
        break;
      default:
        UNREACHABLE();
    }
    return READ_INT_FIELD(this, offset);
  }
}
2315
2316
// Determines the type of the entry at |index| by first locating its section
// and then scanning the type ranges until |index| falls within one.
ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
  LayoutSection section;
  if (is_extended_layout() && index >= first_extended_section_index()) {
    section = EXTENDED_SECTION;
  } else {
    section = SMALL_SECTION;
  }

  Type type = FIRST_TYPE;
  while (index > last_index(type, section)) {
    type = next_type(type);
  }
  ASSERT(type <= LAST_TYPE);
  return type;
}
2332
2333
// Reads the 64-bit integer entry at |index|.
int64_t ConstantPoolArray::get_int64_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
}


// Reads an INT64-typed entry as a double (the two share 64-bit slots).
double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
}


// Reads the code pointer entry at |index|.
Address ConstantPoolArray::get_code_ptr_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == CODE_PTR);
  return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
}


// Reads the heap pointer entry at |index|.
Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == HEAP_PTR);
  return READ_FIELD(this, OffsetOfElementAt(index));
}


// Reads the 32-bit integer entry at |index|.
int32_t ConstantPoolArray::get_int32_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT32);
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
}
2368
2369
// Stores a 64-bit integer entry; no write barrier needed for raw bits.
void ConstantPoolArray::set(int index, int64_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}


// Stores a double into an INT64-typed slot (they share 64-bit slots).
void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}


// Stores a code pointer entry; no write barrier is emitted here.
void ConstantPoolArray::set(int index, Address value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == CODE_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
}
2390
2391
2392void ConstantPoolArray::set(int index, Object* value) {
2393  ASSERT(map() == GetHeap()->constant_pool_array_map());
2394  ASSERT(get_type(index) == HEAP_PTR);
2395  WRITE_FIELD(this, OffsetOfElementAt(index), value);
2396  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2397}
2398
2399
// Stores a 32-bit integer entry; no write barrier needed for raw bits.
void ConstantPoolArray::set(int index, int32_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT32);
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}
2405
2406
// Initializes the two packed layout words of a small-layout pool from the
// per-type entry counts, and zeroes any padding between the header and the
// first entry.
void ConstantPoolArray::Init(const NumberOfEntries& small) {
  uint32_t small_layout_1 =
      Int64CountField::encode(small.count_of(INT64)) |
      CodePtrCountField::encode(small.count_of(CODE_PTR)) |
      HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
      IsExtendedField::encode(false);
  uint32_t small_layout_2 =
      Int32CountField::encode(small.count_of(INT32)) |
      TotalCountField::encode(small.total_count()) |
      WeakObjectStateField::encode(NO_WEAK_OBJECTS);
  WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
  WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
  if (kHeaderSize != kFirstEntryOffset) {
    ASSERT(kFirstEntryOffset - kHeaderSize == kInt32Size);
    WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
  }
}
2424
2425
2426void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2427                                     const NumberOfEntries& extended) {
2428  // Initialize small layout fields first.
2429  Init(small);
2430
2431  // Set is_extended_layout field.
2432  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2433  small_layout_1 = IsExtendedField::update(small_layout_1, true);
2434  WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2435
2436  // Initialize the extended layout fields.
2437  int extended_header_offset = get_extended_section_header_offset();
2438  WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2439      extended.count_of(INT64));
2440  WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2441      extended.count_of(CODE_PTR));
2442  WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2443      extended.count_of(HEAP_PTR));
2444  WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2445      extended.count_of(INT32));
2446}
2447
2448
2449int ConstantPoolArray::size() {
2450  NumberOfEntries small(this, SMALL_SECTION);
2451  if (!is_extended_layout()) {
2452    return SizeFor(small);
2453  } else {
2454    NumberOfEntries extended(this, EXTENDED_SECTION);
2455    return SizeForExtended(small, extended);
2456  }
2457}
2458
2459
// Total number of entries. The small-section total is cached in the second
// layout word; extended-section counts are summed per type.
int ConstantPoolArray::length() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  int length = TotalCountField::decode(small_layout_2);
  if (is_extended_layout()) {
    length += number_of_entries(INT64, EXTENDED_SECTION) +
              number_of_entries(CODE_PTR, EXTENDED_SECTION) +
              number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
              number_of_entries(INT32, EXTENDED_SECTION);
  }
  return length;
}
2471
2472
// Returns the current index and advances, hopping to the extended section
// when the current section's entries for this type are exhausted.
int ConstantPoolArray::Iterator::next_index() {
  ASSERT(!is_finished());
  int ret = next_index_++;
  update_section();
  return ret;
}


// Done once we have passed the last entry of this type in the final section.
bool ConstantPoolArray::Iterator::is_finished() {
  return next_index_ > array_->last_index(type_, final_section_);
}


// If the current section is exhausted (and it is not the final one), jump to
// the first entry of this type in the extended section.
void ConstantPoolArray::Iterator::update_section() {
  if (next_index_ > array_->last_index(type_, current_section_) &&
      current_section_ != final_section_) {
    ASSERT(final_section_ == EXTENDED_SECTION);
    current_section_ = EXTENDED_SECTION;
    next_index_ = array_->first_index(type_, EXTENDED_SECTION);
  }
}
2494
2495
// The barrier may be skipped only for new-space objects, and never while
// incremental marking is in progress — hence the marking check comes first.
// The DisallowHeapAllocation argument pins the decision to a no-GC scope.
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
2503
2504
// Store with a caller-chosen write barrier mode (typically obtained from
// GetWriteBarrierMode under a no-GC scope).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


// Store that records the slot for new-space values directly instead of going
// through the full (incremental-marking-aware) write barrier.
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


// Barrier-free store; only legal for values that are not in new space.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
2538
2539
// Stores the undefined value; no barrier, since undefined is asserted to be
// outside new space.
void FixedArray::set_undefined(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
2548
2549
2550void FixedArray::set_null(int index) {
2551  ASSERT(index >= 0 && index < this->length());
2552  ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2553  WRITE_FIELD(this,
2554              kHeaderSize + index * kPointerSize,
2555              GetHeap()->null_value());
2556}
2557
2558
// Stores the hole sentinel; no barrier, since the hole is asserted to be
// outside new space.
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


// Fills the half-open range [from, to) with holes.
void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


// Address of the first element slot, for bulk access.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
2579
2580
// Empty descriptor arrays are shorter than kFirstIndex; only the canonical
// empty_descriptor_array may be that short.
bool DescriptorArray::IsEmpty() {
  ASSERT(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


// Writes the descriptor count slot; no barrier needed for a Smi.
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
2592
2593
// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2. Keys are sorted by hash (via GetSortedKey); after narrowing to the
// first entry with a matching hash, a linear scan resolves hash collisions
// by name equality.
template<SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
  uint32_t hash = name->Hash();
  int limit = high;

  ASSERT(low <= high);

  // Find the lowest entry whose hash is >= the target hash.
  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  // Scan forward over the run of equal hashes looking for an exact match.
  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    if (entry->Hash() != hash) break;
    if (entry->Equals(name)) {
      // In VALID_ENTRIES mode a match beyond the valid range counts as absent.
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  return T::kNotFound;
}
2630
2631
// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid. ALL_ENTRIES walks in hash (sorted) order and can
// stop early once the hash is exceeded; VALID_ENTRIES walks the first
// valid_entries unsorted entries.
template<SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int len, int valid_entries) {
  uint32_t hash = name->Hash();
  if (search_mode == ALL_ENTRIES) {
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->Hash();
      if (current_hash > hash) break;  // Sorted by hash: target cannot follow.
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
  } else {
    ASSERT(len >= valid_entries);
    for (int number = 0; number < valid_entries; number++) {
      Name* entry = array->GetKey(number);
      uint32_t current_hash = entry->Hash();
      if (current_hash == hash && entry->Equals(name)) return number;
    }
  }
  return T::kNotFound;
}
2655
2656
// Looks up |name| in |array|, dispatching between linear search (cheap for
// small arrays) and binary search. VALID_ENTRIES restricts matches to the
// first |valid_entries| entries.
template<SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries) {
  if (search_mode == VALID_ENTRIES) {
    SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_ASSERT(array->IsSortedNoDuplicates());
  }

  int nof = array->number_of_entries();
  if (nof == 0) return T::kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if ((search_mode == ALL_ENTRIES &&
       nof <= kMaxElementsForLinearSearch) ||
      (search_mode == VALID_ENTRIES &&
       valid_entries <= (kMaxElementsForLinearSearch * 3))) {
    return LinearSearch<search_mode>(array, name, nof, valid_entries);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
}
2680
2681
// Searches only the first |valid_descriptors| descriptors for |name|.
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}


// Cached variant of Search(): consults the isolate's per-(map, name)
// descriptor lookup cache before searching, and populates it on a miss.
int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}
2701
2702
// Details of the most recently added descriptor of this map.
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


// Looks up |name| among this map's own descriptors and fills |result| with
// either a descriptor result or NotFound.
void Map::LookupDescriptor(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  DescriptorArray* descriptors = this->instance_descriptors();
  int number = descriptors->SearchWithCache(name, this);
  if (number == DescriptorArray::kNotFound) return result->NotFound();
  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
}


// Looks up a map transition keyed by |name| and fills |result| with either a
// transition result or NotFound.
void Map::LookupTransition(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  int transition_index = this->SearchTransition(name);
  if (transition_index == TransitionArray::kNotFound) return result->NotFound();
  result->TransitionResult(holder, this->GetTransition(transition_index));
}
2725
2726
// The canonical empty elements backing store for this map's elements kind.
// Every returned store is asserted to be outside new space, so stores of it
// need no write barrier.
FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    ASSERT(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
      GetHeap()->EmptyFixedTypedArrayForMap(this);
    ASSERT(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_dictionary_elements()) {
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_slow_element_dictionary()));
    return GetHeap()->empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  return NULL;
}
2749
2750
// Raw slot of descriptor |descriptor_number|'s key.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}


// First slot of a descriptor triple (it starts with the key).
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


// One-past-the-end slot of a descriptor triple: one past the previous
// descriptor's value slot.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


// Key (property name) of descriptor |descriptor_number|.
Name* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


// Index (in insertion order) of the descriptor at sorted position
// |descriptor_number|; the mapping is packed into the details' pointer field.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


// Key of the descriptor at sorted position |descriptor_number|.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


// Records that the descriptor at sorted position |descriptor_index| lives at
// insertion-order index |pointer|, by rewriting the details' pointer field.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


// Rewrites the representation stored in descriptor |descriptor_index|'s
// details.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  ASSERT(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}


// Raw slot of descriptor |descriptor_number|'s value.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}


// Value of descriptor |descriptor_number|.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


// Overwrites the value of descriptor |descriptor_index|.
void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}


// Details (type, attributes, pointer, ...) of descriptor |descriptor_number|,
// stored as a Smi.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


// Property type of descriptor |descriptor_number|.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


// Field index for a FIELD-typed descriptor.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  ASSERT(GetDetails(descriptor_number).type() == FIELD);
  return GetDetails(descriptor_number).field_index();
}


// Field type for a FIELD-typed descriptor (stored in the value slot).
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  ASSERT(GetDetails(descriptor_number).type() == FIELD);
  return HeapType::cast(GetValue(descriptor_number));
}


// The constant stored for a constant descriptor.
Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


// The callbacks object for a CALLBACKS-typed descriptor.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


// The native accessor descriptor, unwrapped from the Foreign in the value
// slot of a CALLBACKS-typed descriptor.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


// Copies descriptor |descriptor_number| (key, value, details) into |desc|,
// handlifying the key and value.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}
2862
2863
// Writes a descriptor triple. The WhitenessWitness certifies the array is
// freshly allocated (white for the GC), so incremental-marking barriers can
// be skipped.
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


// Writes a descriptor triple through the fully barriered set().
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
2890
2891
// Append |desc| as the new last descriptor (barrier-free store variant) and
// insert its key into the hash-sorted index so sorted lookups keep working.
void DescriptorArray::Append(Descriptor* desc,
                             const WhitenessWitness& witness) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc, witness);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Insertion sort by hash: shift sorted entries whose hash is larger one
  // position to the right to make room for the new key.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
2911
2912
// Same as the WhitenessWitness overload above, but with write barriers:
// append |desc| and insert its key into the hash-sorted index.
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Insertion sort by hash (see the witness overload).
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
2931
2932
2933void DescriptorArray::SwapSortedKeys(int first, int second) {
2934  int first_key = GetSortedKeyIndex(first);
2935  SetSortedKey(first, GetSortedKeyIndex(second));
2936  SetSortedKey(second, first_key);
2937}
2938
2939
// While alive, suppresses incremental marking so that barrier-free stores
// into |array| are safe. Only valid when the array is still white
// (unmarked), which the ASSERT checks when marking is active.
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  ASSERT(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);
}


// Re-enable incremental marking on scope exit.
DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}
2951
2952
2953template<typename Derived, typename Shape, typename Key>
2954int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
2955  const int kMinCapacity = 32;
2956  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2957  if (capacity < kMinCapacity) {
2958    capacity = kMinCapacity;  // Guarantee min capacity.
2959  }
2960  return capacity;
2961}
2962
2963
// Convenience overload: look up |key| using this table's isolate.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    // Holes (deleted entries) must be probed past, not matched.
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
2988
2989
// The kMaxNumberKeyIndex slot encodes both the "requires slow elements"
// flag (low bits) and the largest number key seen (remaining bits).
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

// Largest number key stored so far; only meaningful while fast elements
// are still allowed.
uint32_t SeededNumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

// Permanently mark this dictionary as requiring slow elements.
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
3008
3009
3010// ------------------------------------
3011// Cast operations
3012
3013
// Define Type::cast(Object*) for each listed type. Each cast is checked
// with SLOW_ASSERT(object->IsType()) in debug builds (see CAST_ACCESSOR).
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalUint8Array)
CAST_ACCESSOR(ExternalInt16Array)
CAST_ACCESSOR(ExternalUint16Array)
CAST_ACCESSOR(ExternalInt32Array)
CAST_ACCESSOR(ExternalUint32Array)
CAST_ACCESSOR(ExternalFloat32Array)
CAST_ACCESSOR(ExternalFloat64Array)
CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(AccessorInfo)
3086
// Checked cast for typed-array specializations: in debug builds, |object|
// must be a heap object whose instance type matches Traits::kInstanceType.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_ASSERT(object->IsHeapObject() &&
      HeapObject::cast(object)->map()->instance_type() ==
          Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3094
3095
// Generate a checked cast for every Struct subclass in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
3099
3100
// Checked cast for hash-table instantiations; relies on the generic
// IsHashTable() predicate rather than a per-instantiation type check.
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
3107
3108
// Smi-encoded length/size fields. The SYNCHRONIZED_/NOBARRIER_ variants
// additionally define atomic-flavored accessors for the same field (see the
// macro definitions for the exact memory-ordering semantics).
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3117
3118
// Raw accessor for the 32-bit hash field.
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


// Store the hash field. On 64-bit hosts the field occupies only half of a
// pointer-sized slot, so the adjacent 32 bits are cleared to keep the whole
// slot deterministic.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
3130
3131
// Name equality. Identical pointers are always equal. Two distinct
// internalized strings are known to differ, and symbols compare only by
// identity, so both cases short-circuit to false. Everything else falls
// back to a character-wise string comparison.
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}


// Handle-based variant of the above; GC-safe for the slow comparison.
bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
}
3151
3152
// Symbol fields: optional description object, flags Smi, and the private
// bit decoded from the flags.
ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3156
3157
// String equality fast paths: identical pointers are equal, and two
// distinct internalized strings are known to be different.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}


// Handle-based variant; GC-safe for the slow comparison.
bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}


// Return a flat version of |string|. Non-cons strings are already flat; a
// flat cons string has all of its content in first(). Otherwise flatten
// for real, allocating per |pretenure|.
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
3182
3183
// Read the character at |index|, dispatching on the string's representation
// (sequential/cons/external/sliced) and encoding (one/two byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
3208
3209
3210void String::Set(int index, uint16_t value) {
3211  ASSERT(index >= 0 && index < length());
3212  ASSERT(StringShape(this).IsSequential());
3213
3214  return this->IsOneByteRepresentation()
3215      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3216      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3217}
3218
3219
// A string is flat if it is not a cons string, or is a cons string whose
// second part is empty (all content lives in first()).
bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}


// Return the string this indirect (cons or sliced) string wraps.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  ASSERT(this->IsFlat());
  ASSERT(StringShape(this).IsIndirect());
  // Both indirect kinds store the wrapped string at the same offset.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
3235
3236
// Visit the flat content of |string| starting at |offset|. Sliced strings
// are unwrapped iteratively (accumulating their offsets); sequential and
// external content is handed to the visitor and NULL is returned. If a
// cons string is reached, it is returned to the caller instead of being
// traversed.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  ASSERT(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalAsciiString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Unwrap the slice and keep going on its parent.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
3289
3290
// Read the one-byte character at |index|.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Store a character that must fit in one byte.
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (right after the header).
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


// Typed pointer to the one-byte character data.
uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


// Address of the first UC16 character (right after the header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
3317
3318
3319uc16* SeqTwoByteString::GetChars() {
3320  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3321}
3322
3323
// Read the two-byte character at |index|.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


// Store the two-byte character at |index|.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size in bytes; |instance_type| is unused here but keeps the
// signature uniform with other sizing helpers.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


// Object size in bytes (see note above about |instance_type|).
int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
3344
3345
// The string this slice is a view into.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


// Set the parent. Slices may only point at flat backing strings
// (sequential or external), never at other indirect strings.
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  ASSERT(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


// Start offset of the slice within its parent.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3359
3360
// Left half of the cons string.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Raw read of the first field without the String cast check.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


// Set the left half, with a write barrier as requested by |mode|.
void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


// Right half of the cons string (empty when the cons is flat).
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


// Raw read of the second field without the String cast check.
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


// Set the right half, with a write barrier as requested by |mode|.
void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
3391
3392
// True for external strings allocated in the "short" layout; short
// external strings skip the data-cache update (see update_data_cache).
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
3397
3398
// The embedder-provided resource backing this external string.
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refresh the cached pointer to the resource's character data. Short
// external strings have no cache field, so this is a no-op for them.
void ExternalAsciiString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


// Install the backing resource (must be pointer-aligned) and refresh the
// cached data pointer when non-NULL.
void ExternalAsciiString::set_resource(
    const ExternalAsciiString::Resource* resource) {
  ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


// Character data, read through the resource each time.
const uint8_t* ExternalAsciiString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


// Read the character at |index|.
uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
}
3430
3431
// The embedder-provided resource backing this external string.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refresh the cached pointer to the resource's character data. Short
// external strings have no cache field, so this is a no-op for them.
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}
3443
3444
3445void ExternalTwoByteString::set_resource(
3446    const ExternalTwoByteString::Resource* resource) {
3447  *reinterpret_cast<const Resource**>(
3448      FIELD_ADDR(this, kResourceOffset)) = resource;
3449  if (resource != NULL) update_data_cache();
3450}
3451
3452
// Character data, read through the resource each time.
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


// Read the character at |index|.
uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
}


// Pointer to the character data beginning at |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}
3468
3469
// The frame stack is a fixed-size ring buffer; all depths are taken modulo
// its size via kDepthMask.
int ConsStringIteratorOp::OffsetForDepth(int depth) {
  return depth & kDepthMask;
}


// Push a new frame (descending into a left child).
void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


// Replace the current top frame (moving to a right child).
void ConsStringIteratorOp::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


// Track the deepest point reached so far.
void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


// Pop the top frame.
void ConsStringIteratorOp::Pop() {
  ASSERT(depth_ > 0);
  ASSERT(depth_ <= maximum_depth_);
  depth_--;
}
3496
3497
// Return the next character. buffer8_ and buffer16_ alias the same cursor;
// is_one_byte_ selects which view to read and advance.
uint16_t StringCharacterStream::GetNext() {
  ASSERT(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  ASSERT(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


// Stream over the characters of |string| starting at |offset|, using |op|
// to walk cons-string trees.
StringCharacterStream::StringCharacterStream(String* string,
                                             ConsStringIteratorOp* op,
                                             int offset)
  : is_one_byte_(false),
    op_(op) {
  Reset(string, offset);
}


// Re-point the stream at |string| from |offset|. VisitFlat fills the
// buffer pointers via the Visit*String callbacks below; if the string is a
// cons, start iterating its leaves through op_.
void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  op_->Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = op_->Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


// True if more characters are available; refills the buffer from the next
// cons-string leaf when the current segment is exhausted.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = op_->Next(&offset);
  ASSERT_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  ASSERT(buffer8_ != end_);
  return true;
}


// VisitFlat callback: point the stream at a one-byte segment.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


// VisitFlat callback: point the stream at a two-byte segment.
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
3554
3555
// Reset the cache to empty: both size and finger point at the first entry.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


// Clear all entries (overwrite with the-hole) and reset to zero size.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


// Current cache size, stored as a Smi in a fixed slot.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


// Finger (search position), stored as a Smi in a fixed slot.
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
3590
3591
// Read the byte at |index|.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Write the byte at |index|.
void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Read the int at int-index |index| (i.e. byte offset index * kIntSize).
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Recover the tagged ByteArray pointer from the address of its first data
// byte (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


// Untagged address of the first data byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
3619
3620
// Typed view of the external backing-store pointer.
uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


// Raw element read from the external backing store.
uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  return ptr[index];
}


// Handlified element read; uint8 values always fit in a Smi.
Handle<Object> ExternalUint8ClampedArray::get(
    Handle<ExternalUint8ClampedArray> array,
    int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


// Raw element write; the caller is responsible for clamping |value|.
void ExternalUint8ClampedArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  ptr[index] = value;
}
3646
3647
// Pointer to the off-heap backing store, stored as an intptr field.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// Set the backing-store pointer. |mode| is unused: the pointee is off-heap,
// so no GC write barrier is needed.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
3658
3659
// Raw element read from the external backing store.
int8_t ExternalInt8Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


// Handlified element read; int8 values always fit in a Smi.
Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
                                      int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


// Raw element write.
void ExternalInt8Array::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}
3679
3680
// Raw element read from the external backing store.
uint8_t ExternalUint8Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


// Handlified element read; uint8 values always fit in a Smi.
Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


// Raw element write.
void ExternalUint8Array::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}
3700
3701
// Raw element read from the external backing store.
int16_t ExternalInt16Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


// Handlified element read; int16 values always fit in a Smi.
Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


// Raw element write.
void ExternalInt16Array::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}
3721
3722
// Raw element read from the external backing store.
uint16_t ExternalUint16Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


// Handlified element read; uint16 values always fit in a Smi.
Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
                                        int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


// Raw element write.
void ExternalUint16Array::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
3742
3743
// Raw element read from the external backing store.
int32_t ExternalInt32Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


// Handlified element read; int32 may exceed Smi range, so allocate a
// Number via the factory.
Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
                                       int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromInt(array->get_scalar(index));
}


// Raw element write.
void ExternalInt32Array::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}
3763
3764
// Raw element read from the external backing store.
uint32_t ExternalUint32Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


// Handlified element read; uint32 may exceed Smi range, so allocate a
// Number via the factory.
Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
                                        int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromUint(array->get_scalar(index));
}


// Raw element write.
void ExternalUint32Array::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
3784
3785
// Raw element read from the external backing store.
float ExternalFloat32Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


// Handlified element read; floats are boxed as heap Numbers.
Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


// Raw element write.
void ExternalFloat32Array::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
3804
3805
// Raw element read from the external backing store.
double ExternalFloat64Array::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


// Handlified element read; doubles are boxed as heap Numbers.
Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


// Raw element write.
void ExternalFloat64Array::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
3824
3825
// Address of the in-object element data.
void* FixedTypedArrayBase::DataPtr() {
  return FIELD_ADDR(this, kDataOffset);
}


// Byte size of the element data for an array of the given instance type,
// computed from the per-type element size in TYPED_ARRAYS.
int FixedTypedArrayBase::DataSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
      element_size = size;                                                    \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return length() * element_size;
}


// Byte size of this array's element data.
int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


// Total object size: header plus element data, pointer-aligned.
int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


// Total object size for an array of the given instance type.
int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}
3862
3863
// Default (fill) values for the typed-array traits: zero for every integer
// element type, NaN for the two floating-point element types.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return static_cast<float>(OS::nan_value());
}


double Float64ArrayTraits::defaultValue() { return OS::nan_value(); }
3891
3892
3893template <class Traits>
3894typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3895  ASSERT((index >= 0) && (index < this->length()));
3896  ElementType* ptr = reinterpret_cast<ElementType*>(
3897      FIELD_ADDR(this, kDataOffset));
3898  return ptr[index];
3899}
3900
3901
// Float64 specialization: reads through READ_DOUBLE_FIELD rather than a raw
// pointer dereference — presumably to get correct double access on targets
// where a direct unaligned load would be unsafe (see the macro definition).
template<> inline
FixedTypedArray<Float64ArrayTraits>::ElementType
    FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  return READ_DOUBLE_FIELD(this, ElementOffset(index));
}
3908
3909
// Stores |value| at |index| in the inline element storage.
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  ASSERT((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  ptr[index] = value;
}


// Float64 specialization: writes through WRITE_DOUBLE_FIELD, mirroring the
// get_scalar specialization above.
template<> inline
void FixedTypedArray<Float64ArrayTraits>::set(
    int index, Float64ArrayTraits::ElementType value) {
  ASSERT((index >= 0) && (index < this->length()));
  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
}
3925
3926
// Converts an int to this array's element type via a plain static_cast
// (C-like truncation/wrapping semantics for the integer element types).
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}
3931
3932
3933template <> inline
3934uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
3935  if (value < 0) return 0;
3936  if (value > 0xFF) return 0xFF;
3937  return static_cast<uint8_t>(value);
3938}
3939
3940
// Converts a double to the element type. Integer element types go through
// DoubleToInt32 (ECMAScript ToInt32 semantics) and then truncate.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


// Uint8Clamped specialization: clamp to [0, 255] and round-to-nearest-even
// via lrint, per the Uint8ClampedArray conversion rules.
template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}


// Float32 specialization: plain narrowing conversion.
template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


// Float64 specialization: identity.
template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
3966
3967
// Boxes the element at |index| as a handle, using the traits' ToHandle to
// pick Smi vs. heap number representation.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}
3974
3975
3976template <class Traits>
3977Handle<Object> FixedTypedArray<Traits>::SetValue(
3978    Handle<FixedTypedArray<Traits> > array,
3979    uint32_t index,
3980    Handle<Object> value) {
3981  ElementType cast_value = Traits::defaultValue();
3982  if (index < static_cast<uint32_t>(array->length())) {
3983    if (value->IsSmi()) {
3984      int int_value = Handle<Smi>::cast(value)->value();
3985      cast_value = from_int(int_value);
3986    } else if (value->IsHeapNumber()) {
3987      double double_value = Handle<HeapNumber>::cast(value)->value();
3988      cast_value = from_double(double_value);
3989    } else {
3990      // Clamp undefined to the default value. All other types have been
3991      // converted to a number type further up in the call chain.
3992      ASSERT(value->IsUndefined());
3993    }
3994    array->set(index, cast_value);
3995  }
3996  return Traits::ToHandle(array->GetIsolate(), cast_value);
3997}
3998
3999
// Boxing helpers: element types of 16 bits or fewer always fit in a Smi;
// 32-bit types may not, so they go through the number factory; floats
// always become (possibly heap-allocated) numbers.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
4044
4045
// Id of the static visitor used by the GC for objects with this map.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes. Stored in pointer-size units in a single byte,
// hence the shift; the no-barrier read pairs with set_instance_size below.
int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


// Number of in-object property slots.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}


// Byte offset of in-object property |index|. In-object properties occupy
// the last inobject_properties() pointer slots of the instance, so after
// the adjustment |index| is non-positive.
int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  ASSERT(index <= 0);
  return instance_size() + (index * kPointerSize);
}
4079
4080
// Computes the byte size of this object given its map. Fixed-size objects
// are answered directly from the map; variable-sized objects (signalled by
// kVariableSizeSentinel) dispatch on instance type. reinterpret_cast is
// used instead of the checked casts because this runs during GC when the
// map word may be marked.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE ||
      instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return reinterpret_cast<ConstantPoolArray*>(this)->size();
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  // The only remaining variable-sized type.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
4120
4121
// Stores the instance size. The size must be pointer-aligned and, once
// scaled down to pointer-size units, fit in a single byte.
void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}
4143
4144
// The instance type tag; stored in one byte.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


// Number of property fields allocated but not yet used.
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Saturate at 255 since the field is a single byte.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


// Raw access to the two one-byte flag fields; individual flags are
// manipulated by the named accessors below.
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
4183
4184
4185void Map::set_non_instance_prototype(bool value) {
4186  if (value) {
4187    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4188  } else {
4189    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4190  }
4191}
4192
4193
4194bool Map::has_non_instance_prototype() {
4195  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4196}
4197
4198
// Flag: this map belongs to a function that has a prototype.
void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}


bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}


// Flag: accesses to objects with this map require an access check.
void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


// Flag (in bit_field2): objects with this map accept new properties.
void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}
4234
4235
// Flag (in bit_field3): this map is shared between objects.
void Map::set_is_shared(bool value) {
  set_bit_field3(IsShared::update(bit_field3(), value));
}


bool Map::is_shared() {
  return IsShared::decode(bit_field3()); }


// Marking a map as a dictionary map also marks it unstable, since
// dictionary-mode objects can change shape without a map transition.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}


// NOTE(review): a Code accessor in the middle of the Map accessors —
// presumably a historical artifact of how this file grew.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
4260
4261
4262void Map::set_owns_descriptors(bool is_shared) {
4263  set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
4264}
4265
4266
4267bool Map::owns_descriptors() {
4268  return OwnsDescriptors::decode(bit_field3());
4269}
4270
4271
// The following accessors all manipulate single flags (or the small
// construction counter) packed into the 32-bit bit_field3 word.

// Flag: instances of this map are callable via an instance call handler.
void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}


bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}


// Flag: this map has been superseded and its objects should be migrated.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


// Flag: this map is a target that deprecated objects migrate to.
void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}


// In-object slack tracking state (see also construction_count below).
void Map::set_done_inobject_slack_tracking(bool value) {
  set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
}


bool Map::done_inobject_slack_tracking() {
  return DoneInobjectSlackTracking::decode(bit_field3());
}


void Map::set_construction_count(int value) {
  set_bit_field3(ConstructionCount::update(bit_field3(), value));
}


int Map::construction_count() {
  return ConstructionCount::decode(bit_field3());
}


// Flag: instances of this map are frozen (Object.freeze).
void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}


bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}


// Stability: a stable map's layout will not change without notification;
// note the flag is stored inverted (IsUnstable).
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


// True when a code cache has been attached (anything other than the
// canonical empty fixed array).
bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
4345
4346
4347bool Map::CanBeDeprecated() {
4348  int descriptor = LastAdded();
4349  for (int i = 0; i <= descriptor; i++) {
4350    PropertyDetails details = instance_descriptors()->GetDetails(i);
4351    if (details.representation().IsNone()) return true;
4352    if (details.representation().IsSmi()) return true;
4353    if (details.representation().IsDouble()) return true;
4354    if (details.representation().IsHeapObject()) return true;
4355    if (details.type() == CONSTANT) return true;
4356  }
4357  return false;
4358}
4359
4360
// Marks the map unstable and deoptimizes any code that depended on its
// layout staying fixed. No-op if the map was already unstable.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}


// Map checks may be omitted only for stable maps, and only when the flag
// enabling the optimization is on.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
4374
4375
// DependentCode layout: the first kGroupCount slots hold per-group entry
// counts (as Smis); code/object entries follow from kCodesStartIndex,
// grouped contiguously by dependency group.

int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}


void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}


// An entry is either a Code object or (see compilation_info_at) a Foreign
// wrapping a CompilationInfo for not-yet-finished compilations.
bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}

Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}


CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


// Raw slot pointer, for in-place updates by the GC.
Object** DependentCode::slot_at(int i) {
  return RawFieldOfElementAt(kCodesStartIndex + i);
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}


// Makes room for one more entry in |group| by shifting the first element
// of each later group to that group's end (groups are contiguous, so only
// one element per later group needs to move).
void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
4435
4436
// Writes the packed flags word (kind, IC state, stub type, extra IC state,
// cache holder — see ComputeFlags below).
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}


// Kind-specific extra state; only meaningful for IC stubs (and the
// debugger's stand-ins for them).
ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}


Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}
4469
4470
// For initialization: raw whole-word writes used when a Code object is
// first set up, before the individual bit fields are meaningful.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


// Whether this code object was produced by the optimizing compiler.
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4493
4494
// The code stub's major key, valid only for the kinds listed in
// has_major_key() below. Stored in a bit field of kKindSpecificFlags2.
int Code::major_key() {
  ASSERT(has_major_key());
  return StubMajorKeyField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_major_key(int major) {
  ASSERT(has_major_key());
  ASSERT(0 <= major && major < 256);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = StubMajorKeyField::update(previous, major);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4509
4510
4511bool Code::has_major_key() {
4512  return kind() == STUB ||
4513      kind() == HANDLER ||
4514      kind() == BINARY_OP_IC ||
4515      kind() == COMPARE_IC ||
4516      kind() == COMPARE_NIL_IC ||
4517      kind() == LOAD_IC ||
4518      kind() == KEYED_LOAD_IC ||
4519      kind() == STORE_IC ||
4520      kind() == CALL_IC ||
4521      kind() == KEYED_STORE_IC ||
4522      kind() == TO_BOOLEAN_IC;
4523}
4524
4525
// The following accessors are valid only for full-codegen code
// (kind() == FUNCTION); each reads or writes a dedicated byte field.

bool Code::optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


// The next three flag pairs share the single kFullCodeFlags byte.
bool Code::has_deoptimization_support() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::is_compiled_optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}


void Code::set_compiled_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


// Deepest loop nesting level at which on-stack replacement may trigger.
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


// Profiler tick counter used to decide when to optimize; capped at one
// byte (the setter asserts ticks < 256, so callers must saturate).
int Code::profiler_ticks() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}


void Code::set_profiler_ticks(int ticks) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(ticks < 256);
  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
}
4607
4608
// Number of stack slots used by optimized frames; only valid for
// crankshafted code.
unsigned Code::stack_slots() {
  ASSERT(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4614
4615
4616void Code::set_stack_slots(unsigned slots) {
4617  CHECK(slots <= (1 << kStackSlotsBitCount));
4618  ASSERT(is_crankshafted());
4619  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4620  int updated = StackSlotsField::update(previous, slots);
4621  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4622}
4623
4624
// Byte offset of the safepoint table within this (crankshafted) code.
unsigned Code::safepoint_table_offset() {
  ASSERT(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
4630
4631
4632void Code::set_safepoint_table_offset(unsigned offset) {
4633  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4634  ASSERT(is_crankshafted());
4635  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4636  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4637  int updated = SafepointTableOffsetField::update(previous, offset);
4638  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4639}
4640
4641
// Byte offset of the back-edge (interrupt) table; full-codegen only.
unsigned Code::back_edge_table_offset() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_back_edge_table_offset(unsigned offset) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


// Whether the back edges have been patched for on-stack replacement.
bool Code::back_edges_patched_for_osr() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgesPatchedForOSRField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_back_edges_patched_for_osr(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgesPatchedForOSRField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}



// For ToBoolean ICs, the extra IC state doubles as the stub's state byte.
byte Code::to_boolean_state() {
  return extra_ic_state();
}
4677
4678
// Flag (STUB only): the stub embeds a function cache.
bool Code::has_function_cache() {
  ASSERT(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_has_function_cache(bool flag) {
  ASSERT(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// Flag (OPTIMIZED_FUNCTION only): this code is queued for deoptimization.
bool Code::marked_for_deoptimization() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// Weak stubs hold their embedded maps/objects weakly; an invalidated weak
// stub is one whose weak references have been cleared.
bool Code::is_weak_stub() {
  return CanBeWeakStub() && WeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::mark_as_weak_stub() {
  ASSERT(CanBeWeakStub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = WeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_invalidated_weak_stub() {
  return is_weak_stub() && InvalidatedWeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


// NOTE(review): this asserts is_inline_cache_stub() while the companion
// accessors guard on weak-stub state — confirm the intended precondition.
void Code::mark_as_invalidated_weak_stub() {
  ASSERT(is_inline_cache_stub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = InvalidatedWeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4736
4737
// True when kind() is any of the IC kinds in IC_KIND_LIST.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}


bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}


// Debug break stand-ins carry the DEBUG_STUB IC state.
bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}
4757
4758
// The out-of-line constant pool attached to this code object.
ConstantPoolArray* Code::constant_pool() {
  return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
}


void Code::set_constant_pool(Object* value) {
  ASSERT(value->IsConstantPoolArray());
  WRITE_FIELD(this, kConstantPoolOffset, value);
  // Pointer store into a heap object: notify the incremental marker / GC.
  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
}
4769
4770
// Packs kind, IC state, stub type, extra IC state and cache holder into a
// single Flags word (the inverse of the Extract* helpers below).
Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               StubType type,
                               InlineCacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}


Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}


// Handlers store the kind they handle for in the extra-IC-state slot
// (their own kind is always Code::HANDLER).
Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
                                      StubType type,
                                      InlineCacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}
4799
4800
// Field extractors for the packed Flags word (inverse of ComputeFlags).
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


// Clears only the stub-type bits, keeping the rest of the flags intact.
Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}
4830
4831
// Maps an instruction (entry) address back to its enclosing Code object,
// which starts kHeaderSize bytes before the first instruction.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}


// Reads a code entry address stored at |location_of_address| and converts
// it to the corresponding heap object.
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


// Decides whether |object|, embedded in optimized code, should be held
// weakly: transitionable maps and JS objects (directly or via a Cell),
// subject to the corresponding flags.
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (!FLAG_collect_maps) return false;
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsJSObject() ||
      (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  return false;
}
4861
4862
// Small fixed-capacity list of (map to find -> object to replace with)
// pairs, consumed by Code (its friend) during find-and-replace patching.
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  // Adds one pair; at most kMaxCount pairs may be registered.
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    ASSERT(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
4879
4880
// Like IsWeakObjectInOptimizedCode, but for objects embedded in ICs: only
// transitionable maps qualify, gated on the relevant flags.
bool Code::IsWeakObjectInIC(Object* object) {
  return object->IsMap() && Map::cast(object)->CanTransition() &&
         FLAG_collect_maps &&
         FLAG_weak_embedded_maps_in_ic;
}
4886
4887
// The prototype shared by all objects with this map.
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


// Prototype must be null or a JSReceiver; the conditional write barrier
// lets callers skip the barrier when they know it is unnecessary.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
4898
4899
// If the descriptor is using the empty transition array, install a new empty
// transition array that will have place for an element transition.
// Also upgrades a simple (non-full) transition representation to a full
// TransitionArray; no-op when a full array is already present.
static void EnsureHasTransitionArray(Handle<Map> map) {
  Handle<TransitionArray> transitions;
  if (!map->HasTransitionArray()) {
    transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    // Preserve the existing back pointer in the new array's storage slot.
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    transitions = TransitionArray::ExtendToFullTransitionArray(map);
  } else {
    return;
  }
  map->set_transitions(*transitions);
}
4914
4915
// Installs |descriptors| and marks every descriptor in it as owned by
// this map.
void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}


// Generates Map::instance_descriptors() / set_instance_descriptors().
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
4924
4925
4926void Map::set_bit_field3(uint32_t bits) {
4927  if (kInt32Size != kPointerSize) {
4928    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
4929  }
4930  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
4931}
4932
4933
4934uint32_t Map::bit_field3() {
4935  return READ_UINT32_FIELD(this, kBitField3Offset);
4936}
4937
4938
4939void Map::AppendDescriptor(Descriptor* desc) {
4940  DescriptorArray* descriptors = instance_descriptors();
4941  int number_of_own_descriptors = NumberOfOwnDescriptors();
4942  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
4943  descriptors->Append(desc);
4944  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
4945}
4946
4947
4948Object* Map::GetBackPointer() {
4949  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4950  if (object->IsDescriptorArray()) {
4951    return TransitionArray::cast(object)->back_pointer_storage();
4952  } else {
4953    ASSERT(object->IsMap() || object->IsUndefined());
4954    return object;
4955  }
4956}
4957
4958
4959bool Map::HasElementsTransition() {
4960  return HasTransitionArray() && transitions()->HasElementsTransition();
4961}
4962
4963
4964bool Map::HasTransitionArray() {
4965  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4966  return object->IsTransitionArray();
4967}
4968
4969
4970Map* Map::elements_transition_map() {
4971  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
4972  return transitions()->GetTarget(index);
4973}
4974
4975
4976bool Map::CanHaveMoreTransitions() {
4977  if (!HasTransitionArray()) return true;
4978  return FixedArray::SizeFor(transitions()->length() +
4979                             TransitionArray::kTransitionSize)
4980      <= Page::kMaxRegularHeapObjectSize;
4981}
4982
4983
// Target map of the transition stored at |transition_index| in this map's
// transition array (which must exist; transitions() asserts).
Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}
4987
4988
4989int Map::SearchTransition(Name* name) {
4990  if (HasTransitionArray()) return transitions()->Search(name);
4991  return TransitionArray::kNotFound;
4992}
4993
4994
4995FixedArray* Map::GetPrototypeTransitions() {
4996  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
4997  if (!transitions()->HasPrototypeTransitions()) {
4998    return GetHeap()->empty_fixed_array();
4999  }
5000  return transitions()->GetPrototypeTransitions();
5001}
5002
5003
// Installs |proto_transitions| as |map|'s prototype-transition cache,
// allocating a full transition array first if necessary. The recorded
// number of prototype transitions is preserved across the swap.
void Map::SetPrototypeTransitions(
    Handle<Map> map, Handle<FixedArray> proto_transitions) {
  EnsureHasTransitionArray(map);
  int old_number_of_transitions = map->NumberOfProtoTransitions();
#ifdef DEBUG
  if (map->HasPrototypeTransitions()) {
    ASSERT(map->GetPrototypeTransitions() != *proto_transitions);
    map->ZapPrototypeTransitions();
  }
#endif
  map->transitions()->SetPrototypeTransitions(*proto_transitions);
  map->SetNumberOfProtoTransitions(old_number_of_transitions);
}


bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}


// Typed view of the transitions slot; only valid when HasTransitionArray()
// holds (asserted).
TransitionArray* Map::transitions() {
  ASSERT(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}


void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    // Every existing target that shares this map's descriptors must also be
    // reachable from the replacement array, otherwise it would be lost.
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        ASSERT(new_target_index != TransitionArray::kNotFound);
        ASSERT(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    ASSERT(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}


// Initializes the slot to undefined without a write barrier (the ASSERT
// pins the value to undefined; presumably safe because this runs during
// map setup -- the name suggests initialization-only use).
void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}


// Sets or clears the back pointer. When a transition array is installed the
// back pointer lives inside it; otherwise it occupies the slot directly.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}
5078
5079
// Remaining Map fields.
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

// JSFunction fields (code entry and context have hand-written accessors
// further down).
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

// Global object and global proxy fields.
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

// Accessor/interceptor/template info structs.
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

// AllocationSite / AllocationMemento fields.
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

// Script fields.
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5187
5188Script::CompilationType Script::compilation_type() {
5189  return BooleanBit::get(flags(), kCompilationTypeBit) ?
5190      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5191}
5192void Script::set_compilation_type(CompilationType type) {
5193  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5194      type == COMPILATION_TYPE_EVAL));
5195}
5196Script::CompilationState Script::compilation_state() {
5197  return BooleanBit::get(flags(), kCompilationStateBit) ?
5198      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5199}
5200void Script::set_compilation_state(CompilationState state) {
5201  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5202      state == COMPILATION_STATE_COMPILED));
5203}
5204
5205
// Debugger support structs.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

// SharedFunctionInfo pointer fields.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
                 kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
          kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)


// FunctionTemplateInfo flag bits.
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
// SharedFunctionInfo bits packed into start_position_and_type.
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

// SharedFunctionInfo bits packed into compiler_hints.
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
5263
5264
// On 32-bit hosts every int-valued SharedFunctionInfo field fits in a Smi
// slot, so plain Smi accessors suffice.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

// On 64-bit hosts two int fields share one pointer-sized slot. The "LO"
// half sits at a pointer-aligned offset, so its raw value is stored shifted
// left by one with the heap-object tag bit cleared (the ASSERTs pin
// kHeapObjectTag to 1) -- presumably so a scan of the object can never
// mistake the word for a tagged heap pointer. The range ASSERT in the
// setter guarantees the shift loses no information.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x0);                          \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

// The "HI" half occupies the unaligned upper half of the slot and is a
// plain int.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


// Fields are paired LO/HI so each pair fills one pointer-sized slot.
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
5344
5345
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


// Setter counterpart of the BOOL_GETTER above. Besides flipping the
// compiler hint, disabling optimization is mirrored into the unoptimized
// code object so it is no longer counted as optimizable.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
5362
5363
5364StrictMode SharedFunctionInfo::strict_mode() {
5365  return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5366      ? STRICT : SLOPPY;
5367}
5368
5369
5370void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5371  // We only allow mode transitions from sloppy to strict.
5372  ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5373  int hints = compiler_hints();
5374  hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5375  set_compiler_hints(hints);
5376}
5377
5378
// Further SharedFunctionInfo compiler-hint bits.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)

// Code cache fields.
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5399
5400bool Script::HasValidSource() {
5401  Object* src = this->source();
5402  if (!src->IsString()) return true;
5403  String* src_str = String::cast(src);
5404  if (!StringShape(src_str).IsExternal()) return true;
5405  if (src_str->IsOneByteRepresentation()) {
5406    return ExternalAsciiString::cast(src)->resource() != NULL;
5407  } else if (src_str->IsTwoByteRepresentation()) {
5408    return ExternalTwoByteString::cast(src)->resource() != NULL;
5409  }
5410  return true;
5411}
5412
5413
// Marks a builtin as accepting any number of arguments without going
// through the arguments adaptor frame.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// Source start position, packed into the upper bits of
// start_position_and_type (the low bits hold the is_expression /
// is_toplevel flags, see the BOOL_ACCESSORS above).
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}


// The unoptimized code object shared by all functions with this info.
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Optimized code is never stored here (see the ASSERT); it is attached to
// individual JSFunctions instead.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

  set_code(value);
}


ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


// A function counts as compiled once its code is anything other than the
// lazy-compile builtin.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}


// The function_data slot multiplexes several roles; these helpers decode it.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


// A Smi in function_data encodes a BuiltinFunctionId.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
5499
5500
// The counters field packs ic_age, deopt_count and opt_reenable_tries as
// bit ranges (ICAgeBits / DeoptCountBits / OptReenableTriesBits).
int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


// Increments the deopt count, wrapping around at the bit-field capacity.
void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


// opt_count_and_bailout_reason similarly packs the optimization count and
// the reason optimization was disabled.
int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
  BailoutReason reason = static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
  return reason;
}


bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


// Called while optimization is disabled; counts attempts and re-enables
// optimization with reset counters once the try count reaches a power of
// two >= 16 (exponential back-off).
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}
5575
5576
// A function is a builtin when its context's global object is the builtins
// object.
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


// Native functions are those defined in a script of TYPE_NATIVE.
bool JSFunction::IsNative() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  ASSERT(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}


// Only functions whose formal parameter count is the sentinel skip the
// arguments adaptor frame.
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


// The following three predicates identify the compile-trigger builtins a
// function's code entry may be set to while (concurrent) optimization is
// pending.
bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


// In-object slack tracking is active while the initial map's construction
// count has not yet reached the kNoSlackTracking sentinel.
bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
      initial_map()->construction_count() != JSFunction::kNoSlackTracking;
}
5629
5630
// JSFunction stores a raw code *entry address* rather than a tagged Code
// pointer, hence the custom accessors below.
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


// Stores the entry address of |value| and notifies incremental marking of
// the code-entry write (the new-space ASSERT keeps the raw-address slot
// safe from scavenges).
void JSFunction::set_code(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


// Same as set_code() but without the incremental-marking record; callers
// must guarantee no write barrier is needed.
void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


// Swaps in new code and keeps the native context's optimized-function list
// and the optimized code map consistent with the optimization state change.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}
5688
// This slot holds either the initial map (once instances have been
// created) or the explicitly set prototype (or the hole when neither is
// present yet).
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// An instance prototype exists once either an initial map or a non-hole
// prototype value has been installed.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// Mirrors SharedFunctionInfo::is_compiled(): anything other than the
// lazy-compile builtin counts as compiled.
bool JSFunction::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}
5745
5746
// The literals_or_bindings slot is overloaded: ordinary functions store
// their literals array there, bound functions store their bindings.
// The shared()->bound() asserts enforce that callers use the right view.
FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


// Number of literal entries; only meaningful for non-bound functions.
int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}
5779
5780
// Reads the JS builtin function stored at the slot for |id|.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


// Writes the JS builtin function for |id|, with a write barrier since
// |value| may live in new space.
void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


// Reads the Code object cached for builtin |id|.
Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// Writes the Code object for builtin |id|. No write barrier is emitted;
// the assert below guarantees the code object is not in new space.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!GetHeap()->InNewSpace(value));
}
5807
5808
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


// Fills every pointer field of a freshly allocated proxy with |value|.
// No write barriers are needed: the assert requires |value| to be a
// Smi or an old-space object.
void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
5821
5822
ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)


// Generates getter/setter pairs for OrderedHashTableIterator fields.
// The setter takes a WriteBarrierMode so callers in no-GC regions can
// skip the barrier.
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() {       \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
5848
5849
// The foreign address is stored as a raw intptr, not a tagged pointer,
// so it is read/written with the INTPTR field macros and needs no
// write barrier.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}
5858
5859
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

// The continuation value encodes the generator state: positive values
// are resume offsets (suspended), kGeneratorExecuting and
// kGeneratorClosed are non-positive sentinels (asserted below).
bool JSGeneratorObject::is_suspended() {
  ASSERT_LT(kGeneratorExecuting, kGeneratorClosed);
  ASSERT_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

// Hand-written cast: also checks the object's size, which the generic
// CAST_ACCESSOR does not do.
JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}
5886
5887
ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


// Hand-written cast that additionally asserts the instance size.
JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}
5897
5898
ACCESSORS(JSValue, value, Object, kValueOffset)


// Hand-written cast that additionally asserts the instance size.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
5907
5908
// JSDate stores the primitive time value plus a cache of broken-down
// date components, each in its own field.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


// Hand-written cast that additionally asserts the instance size.
JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}
5925
5926
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Hand-written cast that additionally asserts the instance size.
JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
5940
5941
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)


// Clears the pointer fields in the Code header by writing NULL. The
// type-feedback slot is only wiped when it holds a heap pointer; a Smi
// there encodes other data (e.g. a minor key) and is preserved.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out e.g. a minor key.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}
5961
5962
// The raw_type_feedback_info slot is overloaded by code kind: FUNCTION
// code stores a type-feedback object, IC/stub code stores a Smi
// stub-info. The kind() asserts keep the two views apart.
Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


// Smi view of the overloaded slot, for IC/stub kinds.
int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC || kind() == CALL_IC);
  return Smi::cast(raw_type_feedback_info())->value();
}


// Writing a Smi needs no write barrier, so no mode parameter here.
void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == CALL_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  set_raw_type_feedback_info(Smi::FromInt(value));
}
5996
5997
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// Instructions start right after the Code header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction area size rounded up to the object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Raw read of the relocation info slot without the type check the
// normal accessor performs (usable while the header is being wiped).
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


// True when |inner_pointer| points into this Code object (the end
// address is deliberately included).
bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}
6040
6041
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store is an external (malloc'ed) pointer stored as a raw
// intptr field, so it is not visited by the GC.
void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


// NOTE(review): |mode| is unused — the field holds a raw integer, so no
// write barrier applies; the parameter presumably exists for accessor
// signature uniformity.
void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}
6059
6060
ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


// Boolean flags packed into the Smi flag field.
bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}


void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}
6079
6080
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)
6092
6093
6094JSRegExp::Type JSRegExp::TypeTag() {
6095  Object* data = this->data();
6096  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6097  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6098  return static_cast<JSRegExp::Type>(smi->value());
6099}
6100
6101
6102int JSRegExp::CaptureCount() {
6103  switch (TypeTag()) {
6104    case ATOM:
6105      return 0;
6106    case IRREGEXP:
6107      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6108    default:
6109      UNREACHABLE();
6110      return -1;
6111  }
6112}
6113
6114
// Reads the flags (global/ignore-case/multiline) Smi from the data
// array. Requires a compiled regexp (data is a FixedArray).
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


// Reads the source pattern string from the data array.
String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}
6129
6130
// Raw indexed access into the compiled regexp's data array.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
6142
6143
// Returns the elements kind recorded on the map. In debug builds this
// cross-checks that the elements backing store's map/type is consistent
// with the recorded kind.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    // Sloppy-arguments elements use a FixedArray with at least two
    // entries.
    ASSERT((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
6170
6171
// Selects the ElementsAccessor implementation matching this object's
// current elements kind.
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


// The predicates below are thin wrappers classifying the current
// elements kind.
bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}


// Checks the backing store itself rather than the recorded kind.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
6222
6223
// Expands to HasExternal<Type>Elements() for every typed-array type,
// checking the backing store's instance type directly.
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)          \
bool JSObject::HasExternal##Type##Elements() {                          \
  HeapObject* array = elements();                                       \
  ASSERT(array != NULL);                                                \
  if (!array->IsHeapObject())                                           \
    return false;                                                       \
  return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
}

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsFixedTypedArrayBase();
}


// Analogous expansion for on-heap (fixed) typed-array backing stores.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)         \
bool JSObject::HasFixed##Type##Elements() {                               \
  HeapObject* array = elements();                                         \
  ASSERT(array != NULL);                                                  \
  if (!array->IsHeapObject())                                             \
    return false;                                                         \
  return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE;      \
}

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
6257
6258
// Interceptor presence is recorded on the map.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// Typed view of the properties array for slow-mode objects.
NameDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return NameDictionary::cast(properties());
}


// Typed view of the elements array for dictionary-mode elements.
SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}
6279
6280
// The hash field caches the computed hash; the not-computed mask bits
// are zero once a hash has been stored.
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


// Returns the name's hash, computing and caching it on first use.
uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}
6298
6299
// Incremental string hasher. Starts out assuming the string may be an
// array index (all digits, within index length limits) until a
// character disproves it.
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


// Strings longer than kMaxHashCalcLength get a trivial (length-based)
// hash instead of a character-by-character one.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
6313
6314
// One mixing step of the Jenkins one-at-a-time hash.
uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


// Final avalanche of the Jenkins one-at-a-time hash. A result whose
// hash bits are all zero is mapped to kZeroHash so that zero can be
// reserved (e.g. as the "not computed" marker).
uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
6332
6333
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
6339
6340
// Feeds one character into the array-index tracker. Returns false (and
// permanently clears is_array_index_) as soon as the string can no
// longer be a valid array index.
bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  // Only decimal digits can form an index.
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    // A leading zero is only allowed for the single string "0".
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // Overflow guard: 429496729 == 2^32 / 10, and the (d + 2) >> 3 term
  // (1 exactly when d >= 6) tightens the bound so that
  // array_index_ * 10 + d stays within the valid array-index range.
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
6362
6363
// Feeds a whole character buffer into the hasher. While the string can
// still be an array index each character also updates the index value;
// once that fails, the remaining characters only update the hash.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        // chars[i] was already hashed above; resume plain hashing at
        // the next character.
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}
6382
6383
// Convenience entry point: hashes a whole character sequence in one
// call. Long strings (trivial hash) skip per-character work entirely.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
6392
6393
// Only strings (not symbols) can be array indices.
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


// Fast-path rejection via the cached hash field; otherwise fall back
// to the slow parse.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
6406
6407
// Prototype and constructor are both stored on the map.
Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}


Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}
6416
6417
// Property existence check including the prototype chain. Proxies
// delegate to their handler's trap.
bool JSReceiver::HasProperty(Handle<JSReceiver> object,
                             Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return GetPropertyAttributes(object, name) != ABSENT;
}


// Own-property check. NOTE(review): for proxies this takes the same
// HasPropertyWithHandler path as HasProperty — the own/inherited
// distinction is apparently not made for proxies here; confirm against
// the proxy handler implementation.
bool JSReceiver::HasOwnProperty(Handle<JSReceiver> object, Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return GetOwnPropertyAttributes(object, name) != ABSENT;
}
6435
6436
// Attribute lookup by name. Names that parse as array indices are
// routed to the element path; everything else goes through a
// LookupIterator.
PropertyAttributes JSReceiver::GetPropertyAttributes(Handle<JSReceiver> object,
                                                     Handle<Name> key) {
  uint32_t index;
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  }
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);
}


// Element attribute lookup including the prototype chain (the trailing
// 'true' argument requests the full chain).
PropertyAttributes JSReceiver::GetElementAttribute(Handle<JSReceiver> object,
                                                   uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
}
6457
6458
// A global object is detached when its proxy no longer points back at
// it.
bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
}


// A proxy is detached from |global| when its prototype chain no longer
// starts at that global.
bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
  return GetPrototype() != global;
}
6467
6468
6469Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6470  return object->IsJSProxy()
6471      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6472      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6473}
6474
6475
6476Object* JSReceiver::GetIdentityHash() {
6477  return IsJSProxy()
6478      ? JSProxy::cast(this)->GetIdentityHash()
6479      : JSObject::cast(this)->GetIdentityHash();
6480}
6481
6482
// Element existence check including the prototype chain (trailing
// 'true' argument).
bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true) != ABSENT;
}


// Own-element existence check (trailing 'false' limits the search to
// the receiver itself).
bool JSReceiver::HasOwnElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false) != ABSENT;
}


// Own-element attribute lookup (no prototype chain walk).
PropertyAttributes JSReceiver::GetOwnElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
}
6512
6513
// Boolean bits and attribute bits are packed into AccessorInfo's Smi
// flag field.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


// An accessor with no expected receiver template accepts any receiver;
// otherwise the receiver must be an instance of that template.
bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
}
6549
6550
// Resets the setter to undefined. undefined is immortal/immovable, so
// the write barrier can be skipped.
void ExecutableAccessorInfo::clear_setter() {
  set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
}
6554
6555
// Translates the public v8::AccessControl bits into the internal
// boolean bits stored in the access_flags Smi.
void AccessorPair::set_access_flags(v8::AccessControl access_control) {
  int current = access_flags()->value();
  current = BooleanBit::set(current,
                            kAllCanReadBit,
                            access_control & ALL_CAN_READ);
  current = BooleanBit::set(current,
                            kAllCanWriteBit,
                            access_control & ALL_CAN_WRITE);
  set_access_flags(Smi::FromInt(current));
}


bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}


bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}
6576
6577
// Convenience overload: stores an entry with empty property details.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


// Writes the (key, value, details) triple into the entry's three
// consecutive slots. Runs under DisallowHeapAllocation so a single
// write-barrier mode can be computed up front and reused.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  // Named entries must carry an enumeration index unless deleted.
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = DerivedHashTable::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, *key, mode);
  FixedArray::set(index+1, *value, mode);
  FixedArray::set(index+2, details.AsSmi());
}
6601
6602
// Hash-table "shape" policies for the numeric dictionaries: how keys
// compare, hash, and convert to heap objects.
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


// Unseeded variant: plain integer hash with a zero seed.
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


// Seeded variant: the per-heap hash seed perturbs the hash.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


// Boxes the uint32 key as a heap number (or Smi) for storage.
Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}
6637
6638
// Shape policy for name-keyed dictionaries.
bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


// Names are stored directly; they must already be unique (interned).
Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  ASSERT(key->IsUniqueName());
  return key;
}


// Forwards to the base-class enumeration-index regeneration.
void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}
6668
6669
// Shape policy for object-keyed hash tables: SameValue comparison and
// identity-hash based hashing.
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


// Requires the key's identity hash to exist already (GetHash returns a
// Smi here).
uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}


// Keys are stored as-is.
Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


// Forwards to the generic hash-table shrink.
Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}
6696
6697
6698template <int entrysize>
6699bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
6700  return key->SameValue(other);
6701}
6702
6703
6704template <int entrysize>
6705uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
6706  intptr_t hash = reinterpret_cast<intptr_t>(*key);
6707  return (uint32_t)(hash & 0xFFFFFFFF);
6708}
6709
6710
6711template <int entrysize>
6712uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
6713                                                      Object* other) {
6714  intptr_t hash = reinterpret_cast<intptr_t>(other);
6715  return (uint32_t)(hash & 0xFFFFFFFF);
6716}
6717
6718
6719template <int entrysize>
6720Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
6721                                                       Handle<Object> key) {
6722  return key;
6723}
6724
6725
// Resets the map's code cache to the shared empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
6734
6735
6736void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
6737  ASSERT(array->HasFastSmiOrObjectElements());
6738  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
6739  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6740  if (elts->length() < required_size) {
6741    // Doubling in size would be overkill, but leave some slack to avoid
6742    // constantly growing.
6743    Expand(array, required_size + (required_size >> 3));
6744    // It's a performance benefit to keep a frequently used array in new-space.
6745  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6746             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6747    // Expand will allocate a new backing store in new space even if the size
6748    // we asked for isn't larger than what we had before.
6749    Expand(array, required_size);
6750  }
6751}
6752
6753
// Sets the array's length to a Smi value. Smis are immediates, so the
// write barrier can be skipped.
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
6758
6759
// Length can be set only for arrays with in-heap backing stores; external
// array elements have a fixed length.
bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
6765
6766
// Installs |storage| as the array's backing store and updates its length.
// The caller must supply a store compatible with the array's elements
// kind; the ASSERT below spells out the allowed pairings.
void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  // Either a double backing store paired with a fast-double kind, or a
  // non-double store paired with a fast-object kind (or a fast-smi kind,
  // provided the store really holds only smis/holes).
  ASSERT((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
6781
6782
// Sentinel stored in feedback slots that have not yet recorded any type.
Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->uninitialized_symbol();
}
6786
6787
// Sentinel stored in feedback slots that have seen too many types to track.
Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->megamorphic_symbol();
}
6791
6792
// Sentinel for a monomorphic array site: the elements kind itself, packed
// into a Smi.
Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
    ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}
6797
6798
// Raw (unhandlified) variant of UninitializedSentinel, for use where no
// handle scope is available.
Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
  return heap->uninitialized_symbol();
}
6802
6803
// Total number of ICs, decoded from the bit field packed into storage1.
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}
6808
6809
// Stores |count| into the IC-total-count bit field of storage1, leaving
// the other fields packed there untouched. decode(count) truncates the
// value to the field's width before storing.
void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
6816
6817
// Number of ICs that have collected type information, decoded from the
// bit field packed into storage2.
int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}
6822
6823
// Adjusts the with-type-info IC counter by |delta| (may be negative).
void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    // Truncate to the field width before re-packing.
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
6838
6839
// Zeroes both packed storage words (all counters and checksums start at 0).
void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}
6844
6845
// Bumps the own-type-change checksum, wrapping it within its bit width.
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range by forcing/clearing the bits
  // above the Smi payload.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
6856
6857
// Stores |checksum| (truncated to kTypeChangeChecksumBits) into storage2.
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range by forcing/clearing the bits
  // above the Smi payload.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
6867
6868
// Current own-type-change checksum, decoded from storage1.
int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}
6873
6874
// True when the stored inlined checksum equals |checksum| truncated to the
// same bit width used when it was stored.
bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}
6880
6881
6882SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
6883
6884
6885Relocatable::Relocatable(Isolate* isolate) {
6886  isolate_ = isolate;
6887  prev_ = isolate->relocatable_top();
6888  isolate->set_relocatable_top(this);
6889}
6890
6891
// Unlinks this object from the isolate's relocatable stack. Destruction
// must be strictly LIFO, which the ASSERT enforces.
Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
6896
6897
// A JSObject's body size is simply its map's instance size.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
6901
6902
// Lets the visitor see (and possibly update) the raw external address slot.
void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}
6907
6908
// Static-dispatch variant of ForeignIterateBody for template visitors.
template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}
6914
6915
// Lets the visitor see (and possibly update) the external resource pointer.
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
6921
6922
// Static-dispatch variant for template visitors.
template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
6929
6930
// Lets the visitor see (and possibly update) the external resource pointer.
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
6936
6937
// Static-dispatch variant for template visitors.
template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
6944
6945
6946template<int start_offset, int end_offset, int size>
6947void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
6948    HeapObject* obj,
6949    ObjectVisitor* v) {
6950    v->VisitPointers(HeapObject::RawField(obj, start_offset),
6951                     HeapObject::RawField(obj, end_offset));
6952}
6953
6954
// Visits every tagged field from start_offset up to the object's actual
// size (for variable-sized objects).
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}
6962
6963
6964#undef TYPE_CHECKER
6965#undef CAST_ACCESSOR
6966#undef INT_ACCESSORS
6967#undef ACCESSORS
6968#undef ACCESSORS_TO_SMI
6969#undef SMI_ACCESSORS
6970#undef SYNCHRONIZED_SMI_ACCESSORS
6971#undef NOBARRIER_SMI_ACCESSORS
6972#undef BOOL_GETTER
6973#undef BOOL_ACCESSORS
6974#undef FIELD_ADDR
6975#undef READ_FIELD
6976#undef NOBARRIER_READ_FIELD
6977#undef WRITE_FIELD
6978#undef NOBARRIER_WRITE_FIELD
6979#undef WRITE_BARRIER
6980#undef CONDITIONAL_WRITE_BARRIER
6981#undef READ_DOUBLE_FIELD
6982#undef WRITE_DOUBLE_FIELD
6983#undef READ_INT_FIELD
6984#undef WRITE_INT_FIELD
6985#undef READ_INTPTR_FIELD
6986#undef WRITE_INTPTR_FIELD
6987#undef READ_UINT32_FIELD
6988#undef WRITE_UINT32_FIELD
6989#undef READ_SHORT_FIELD
6990#undef WRITE_SHORT_FIELD
6991#undef READ_BYTE_FIELD
6992#undef WRITE_BYTE_FIELD
6993#undef NOBARRIER_READ_BYTE_FIELD
6994#undef NOBARRIER_WRITE_BYTE_FIELD
6995
6996} }  // namespace v8::internal
6997
6998#endif  // V8_OBJECTS_INL_H_
6999