objects-inl.h revision 44f0eee88ff00398ff7f715fab053374d808c90d
1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27//
28// Review notes:
29//
30// - The use of macros in these inline functions may seem superfluous
31// but it is absolutely needed to make sure gcc generates optimal
32// code. gcc is not happy when attempting to inline too deep.
33//
34
35#ifndef V8_OBJECTS_INL_H_
36#define V8_OBJECTS_INL_H_
37
38#include "objects.h"
39#include "contexts.h"
40#include "conversions-inl.h"
41#include "heap.h"
42#include "isolate.h"
43#include "property.h"
44#include "spaces.h"
45#include "v8memory.h"
46
47namespace v8 {
48namespace internal {
49
// PropertyDetails packs its bit-fields into a Smi; these helpers convert
// between the packed Smi form and the in-core representation.
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


// Returns a copy of these details with the deleted bit set; used to mark a
// dictionary slot as removed without restructuring the table.
PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
64
65
// Defines type::cast(Object*): an unchecked reinterpret-style down-cast,
// type-verified only in debug builds via ASSERT.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


// Accessors for raw (untagged) int fields; no write barrier is required
// because the stored value is not a heap pointer.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Accessors for tagged-pointer fields. The setter issues a conditional
// write barrier so the GC can track old-to-new-space pointers.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);           \
  }


// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
#define ACCESSORS_GCSAFE(holder, name, type, offset)                    \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode);                \
  }


// Accessors for int fields stored as Smis; Smis are not heap pointers,
// so no write barrier is needed.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
104
// Read-only accessor for a boolean bit inside the given bit-field holder.
// NOTE(review): the original definition ended with a stray line
// continuation ('\' after the closing brace), which silently spliced the
// following blank line into the macro body; the stray '\' is removed.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }

110
// Read/write accessors for a boolean bit inside the given bit-field holder.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
118
119
120bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
121  // There is a constraint on the object; check.
122  if (!this->IsJSObject()) return false;
123  // Fetch the constructor function of the object.
124  Object* cons_obj = JSObject::cast(this)->map()->constructor();
125  if (!cons_obj->IsJSFunction()) return false;
126  JSFunction* fun = JSFunction::cast(cons_obj);
127  // Iterate through the chain of inheriting function templates to
128  // see if the required one occurs.
129  for (Object* type = fun->shared()->function_data();
130       type->IsFunctionTemplateInfo();
131       type = FunctionTemplateInfo::cast(type)->parent_template()) {
132    if (type == expected) return true;
133  }
134  // Didn't find the required type in the inheritance chain.
135  return false;
136}
137
138
// Basic type predicates. Each reads the instance type out of the object's
// map; none of them allocate.
bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::IsHeapNumber() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
}


bool Object::IsString() {
  // All string instance types are allocated below FIRST_NONSTRING_TYPE.
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsSymbol() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
  ASSERT(kSymbolTag != 0);
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  return (type & kIsSymbolMask) != 0;
}


bool Object::IsConsString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Single mask-and-compare: must be a string AND have cons representation.
  return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
         (kStringTag | kConsStringTag);
}


// The remaining string predicates delegate to StringShape, which caches
// the instance type for repeated representation tests.
bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}
219
220
// StringShape caches a string's instance type so a sequence of shape
// queries avoids re-reading the map each time. All constructors verify
// (in debug builds) that the type really is a string type.
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsSymbol() {
  ASSERT(valid());
  ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}


// Encoding tests on the string itself (read the map directly).
bool String::IsAsciiRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


// True for ASCII-encoded strings and for two-byte strings whose data-hint
// bit records that all characters are ASCII.
bool String::HasOnlyAsciiChars() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag ||
         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
}
266
267
// Representation tests on the cached instance type.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


// Combined representation+encoding tag, used for exact shape matches below.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


// Keep the internal mask in sync with the one exposed to the API layer.
STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


// The API layer hard-codes this tag; verify it matches.
STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);
320
321
322uc32 FlatStringReader::Get(int index) {
323  ASSERT(0 <= index && index <= length_);
324  if (is_ascii_) {
325    return static_cast<const byte*>(start_)[index];
326  } else {
327    return static_cast<const uc16*>(start_)[index];
328  }
329}
330
331
bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


bool Object::IsByteArray() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
}


bool Object::IsExternalPixelArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
          EXTERNAL_PIXEL_ARRAY_TYPE;
}


// External array types occupy a contiguous instance-type range, so a
// single pair of comparisons covers all of them.
bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


// Per-element-type external array predicates.
bool Object::IsExternalByteArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_BYTE_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedByteArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
}


bool Object::IsExternalShortArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_SHORT_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedShortArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
}


bool Object::IsExternalIntArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_INT_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedIntArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
}


bool Object::IsExternalFloatArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_FLOAT_ARRAY_TYPE;
}
407
408
// MaybeObject failure predicates: a MaybeObject is either a real Object
// or a tagged Failure describing why allocation/lookup did not succeed.
bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  // There is exactly one exception sentinel, so pointer identity suffices.
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


// Checked down-cast from MaybeObject to Failure (checked in debug only).
Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}
440
441
// JS object instance types are allocated at the top of the type range,
// so a single lower-bound comparison suffices.
bool Object::IsJSObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSContextExtensionObject() {
  return IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_CONTEXT_EXTENSION_OBJECT_TYPE);
}


bool Object::IsMap() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
}


bool Object::IsFixedArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
}


// Descriptor arrays have no instance type of their own; this is only a
// plausibility check used by asserts.
bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
497
498
// Contexts are plain fixed arrays distinguished only by their map, so
// these predicates compare against the heap's known context maps.
bool Object::IsContext() {
  if (Object::IsHeapObject()) {
    Heap* heap = HeapObject::cast(this)->GetHeap();
    return (HeapObject::cast(this)->map() == heap->context_map() ||
            HeapObject::cast(this)->map() == heap->catch_context_map() ||
            HeapObject::cast(this)->map() == heap->global_context_map());
  }
  return false;
}


bool Object::IsCatchContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->catch_context_map();
}


bool Object::IsGlobalContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->global_context_map();
}


bool Object::IsJSFunction() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
}


// Specialization used by the templated Is<T> dispatch helper.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}
533
534
bool Object::IsCode() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
}


bool Object::IsOddball() {
  // Reading maps is unsafe during some GC phases; guard in debug builds.
  ASSERT(HEAP->is_safe_to_read_maps());
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
}


bool Object::IsJSGlobalPropertyCell() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type()
      == JS_GLOBAL_PROPERTY_CELL_TYPE;
}


bool Object::IsSharedFunctionInfo() {
  return Object::IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       SHARED_FUNCTION_INFO_TYPE);
}


bool Object::IsJSValue() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
}


bool Object::IsJSMessageObject() {
  return Object::IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_MESSAGE_OBJECT_TYPE);
}


// A string wrapper is a JSValue whose wrapped value is a string.
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


bool Object::IsProxy() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE;
}


// True/false are the only oddballs whose kind has no non-boolean bits set.
bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


bool Object::IsJSArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
}


bool Object::IsJSRegExp() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
}


// Specialization used by the templated Is<T> dispatch helper.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
607
608
// Hash tables share the FIXED_ARRAY instance type; they are recognized by
// their dedicated map.
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// Every hash table except the heap's symbol table is a dictionary.
bool Object::IsDictionary() {
  return IsHashTable() && this !=
         HeapObject::cast(this)->GetHeap()->symbol_table();
}


bool Object::IsSymbolTable() {
  return IsHashTable() && this ==
         HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}


// Heuristic check (used by asserts): result caches are fixed arrays with
// a header plus a whole number of fixed-size entries.
bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
#endif
  return true;
}


// Heuristic check (used by asserts): the normalized-map cache is a fixed
// array with exactly kEntries elements.
bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
#endif
  return true;
}


// The cache table types have no instance type of their own; any hash
// table passes these checks.
bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}
674
675
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  // Global proxies must always require access checks.
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


// Covers both the user-visible global object and the builtins object.
bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


bool Object::IsJSGlobalObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_GLOBAL_OBJECT_TYPE);
}


bool Object::IsJSBuiltinsObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_BUILTINS_OBJECT_TYPE);
}


// Map-flag predicates (bits stored on the map, not the instance type).
bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_access_check_needed();
}


// True for any of the internal struct types listed in STRUCT_LIST.
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Expands one Is##Name() predicate per struct type in STRUCT_LIST.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
738
739
// Oddball identity predicates: each oddball instance carries a kind tag.
bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
768
769
// Returns the numeric value of a Smi or HeapNumber as a double.
// Precondition (debug-checked): IsNumber().
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}
776
777
778MaybeObject* Object::ToSmi() {
779  if (IsSmi()) return this;
780  if (IsHeapNumber()) {
781    double value = HeapNumber::cast(this)->value();
782    int int_value = FastD2I(value);
783    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
784      return Smi::FromInt(int_value);
785    }
786  }
787  return Failure::Exception();
788}
789
790
// True when this is a JS object whose class name equals |name| (identity
// compare; class-name strings are symbols).
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(HEAP->IsAllocationAllowed());
  return GetElementWithReceiver(this, index);
}


// Variant for callers that have already established the lookup cannot
// fail; failure is debug-checked, not handled.
Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


MaybeObject* Object::GetProperty(String* key) {
  // Attributes out-parameter is required by the helper but discarded here.
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}
823
824
// Raw field access. FIELD_ADDR removes the heap-object tag to produce the
// real byte address of a field at the given offset.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// TODO(isolates): Pass heap in to these macros.
// NOTE(review): the macro body carries its own trailing ';' — callers
// that also add one produce an empty statement (harmless, but easy to
// trip over inside an unbraced if).
#define WRITE_BARRIER(object, offset) \
  object->GetHeap()->RecordWrite(object->address(), offset);

// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.
// NOTE(review): expands to a bare if/else rather than the usual
// do { } while (0) wrapper, so it is susceptible to the dangling-else
// pitfall if used unbraced inside another if — confirm no such call site
// exists before relying on it.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
  if (mode == UPDATE_WRITE_BARRIER) { \
    heap->RecordWrite(object->address(), offset); \
  } else { \
    ASSERT(mode == SKIP_WRITE_BARRIER); \
    ASSERT(heap->InNewSpace(object) || \
           !heap->InNewSpace(READ_FIELD(object, offset)) || \
           Page::FromAddress(object->address())->           \
               IsRegionDirty(object->address() + offset));  \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(HeapNumber* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS


#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(HeapNumber* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


// Typed raw-field accessors for untagged data (no write barrier).
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
920
921
// Returns the address of a tagged field as an Object** slot (for the GC
// and iterators that update slots in place).
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


// Encodes an int as a tagged Smi pointer.
// NOTE(review): left-shifting a negative intptr_t is technically
// implementation-defined/UB in pre-C++20 — relies on the compilers V8
// supports doing the obvious two's-complement shift; confirm accepted
// project-wide.
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
946
947
// Failure objects are tagged pointers whose payload (above the failure
// tag) encodes a type and, for RETRY_AFTER_GC, an allocation space.
Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


// The payload stored above the failure tag bits.
intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


// Packs type + value into a failure-tagged pointer; the ASSERT verifies
// the payload survives the shift without losing high bits.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}
1008
1009
// Returns whether |value| fits in a Smi on this architecture.
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  // Reference result; only consulted by the ASSERT below (which compiles
  // away in release builds, so in_range is deliberately debug-only).
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as an tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
1035
1036
// Wrap a map pointer as a MapWord (the first word of every HeapObject).
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


// Reinterpret the stored word as a map pointer.
Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// During GC the map slot may hold a forwarding address instead of a map.
// Forwarding addresses are distinguishable because they are smi-tagged,
// while real map pointers carry the heap object tag.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


// Encode a relocation target as a forwarding address by stripping the heap
// object tag, leaving a smi-tagged word (see IsForwardingAddress).
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


// Decode the target stored by FromForwardingAddress.
HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1062
1063
// GC marking flag stored in the map word. Note the inverted encoding: a
// cleared bit means "marked".
bool MapWord::IsMarked() {
  return (value_ & kMarkingMask) == 0;
}


// Set the mark by clearing the marking bit (inverted encoding, see
// IsMarked).
void MapWord::SetMark() {
  value_ &= ~kMarkingMask;
}


// Clear the mark by setting the marking bit (inverted encoding).
void MapWord::ClearMark() {
  value_ |= kMarkingMask;
}


// Overflow flag in the map word — presumably set when an object's children
// could not be pushed during marking; see the mark-compact collector.
bool MapWord::IsOverflowed() {
  return (value_ & kOverflowMask) != 0;
}


void MapWord::SetOverflow() {
  value_ |= kOverflowMask;
}


void MapWord::ClearOverflow() {
  value_ &= ~kOverflowMask;
}
1092
1093
// Pack a map address and a live-byte offset into a single map word, used
// during compaction. The encoding holds three bit fields: the compacted
// forwarding offset, the map's offset within its page, and the map page
// index.
MapWord MapWord::EncodeAddress(Address map_address, int offset) {
  // Offset is the distance in live bytes from the first live object in the
  // same page. The offset between two objects in the same page should not
  // exceed the object area size of a page.
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);

  // Offsets are object-aligned, so the low alignment bits can be dropped.
  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));

  Page* map_page = Page::FromAddress(map_address);
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);

  // Maps are map-aligned, so their page offset also compresses.
  uintptr_t map_page_offset =
      map_page->Offset(map_address) >> kMapAlignmentBits;

  uintptr_t encoding =
      (compact_offset << kForwardingOffsetShift) |
      (map_page_offset << kMapPageOffsetShift) |
      (map_page->mc_page_index << kMapPageIndexShift);
  return MapWord(encoding);
}
1115
1116
// Recover the map address packed by EncodeAddress: page index selects the
// page, page offset (re-expanded by the map alignment) selects the map
// within it.
Address MapWord::DecodeMapAddress(MapSpace* map_space) {
  int map_page_index =
      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
  ASSERT_MAP_PAGE_INDEX(map_page_index);

  int map_page_offset = static_cast<int>(
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
      kMapAlignmentBits);

  return (map_space->PageAddress(map_page_index) + map_page_offset);
}
1128
1129
// Recover the live-byte offset packed by EncodeAddress.
int MapWord::DecodeOffset() {
  // The offset field is represented in the kForwardingOffsetBits
  // most-significant bits.
  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
  return static_cast<int>(offset);
}
1137
1138
// Raw round-trip between an Address and a MapWord, used when the map slot
// temporarily stores an encoded address during GC.
MapWord MapWord::FromEncodedAddress(Address address) {
  return MapWord(reinterpret_cast<uintptr_t>(address));
}


Address MapWord::ToEncodedAddress() {
  return reinterpret_cast<Address>(value_);
}
1147
1148
#ifdef DEBUG
// Debug-only: verify that the field at |offset| holds a valid pointer.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

// Debug-only: verify that the field at |offset| holds a smi.
void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif
1158
1159
// Returns the heap this object lives in, reached through its map.
Heap* HeapObject::GetHeap() {
  // During GC, the map pointer in HeapObject is used in various ways that
  // prevent us from retrieving Heap from the map.
  // Assert that we are not in GC, implement GC code in a way that it doesn't
  // pull heap from the map.
  ASSERT(HEAP->is_safe_to_read_maps());
  return map()->heap();
}


// Returns the isolate owning this object (via its heap).
Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}
1173
1174
// The object's map, decoded from the first (map) word.
Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// Raw access to the first word of the object, which holds either the map
// pointer or a GC-time encoding (see MapWord).
MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
1195
1196
// Convert a raw aligned address into a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


// Inverse of FromAddress: the untagged address of this object.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


// Object size in bytes, derived from the map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1211
1212
// Visit every pointer field in the half-open byte range [start, end).
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


// Visit the single pointer field at byte offset |offset|.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1222
1223
// GC mark/overflow flags, all delegated to the bits in the map word.
bool HeapObject::IsMarked() {
  return map_word().IsMarked();
}


void HeapObject::SetMark() {
  ASSERT(!IsMarked());
  // Read-modify-write of the map word; no write barrier needed for it.
  MapWord first_word = map_word();
  first_word.SetMark();
  set_map_word(first_word);
}


void HeapObject::ClearMark() {
  ASSERT(IsMarked());
  MapWord first_word = map_word();
  first_word.ClearMark();
  set_map_word(first_word);
}


bool HeapObject::IsOverflowed() {
  return map_word().IsOverflowed();
}


void HeapObject::SetOverflow() {
  MapWord first_word = map_word();
  first_word.SetOverflow();
  set_map_word(first_word);
}


void HeapObject::ClearOverflow() {
  ASSERT(IsOverflowed());
  MapWord first_word = map_word();
  first_word.ClearOverflow();
  set_map_word(first_word);
}
1263
1264
// The boxed double payload of a HeapNumber.
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Unbiased IEEE-754 exponent, read directly from the exponent word.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Sign bit of the double (nonzero when negative).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1284
1285
// Defines JSObject::properties() / set_properties() over kPropertiesOffset.
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1287
1288
// The elements backing store: a FixedArray (including dictionaries) or an
// ExternalArray.
HeapObject* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(array->IsFixedArray() || array->IsExternalArray());
  return reinterpret_cast<HeapObject*>(array);
}


// Replace the elements backing store; the map's fast-elements bit must
// agree with the kind of array being installed.
void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
  ASSERT(map()->has_fast_elements() ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(value->IsFixedArray() || value->IsExternalArray());
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
}
1306
1307
// Install the shared empty fixed array as the properties store. No write
// barrier is needed because the empty array is never in new space.
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


// Install the shared empty fixed array as the elements store (fast-
// elements maps only); same no-barrier reasoning as above.
void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}
1319
1320
// Switch this object to a fast-elements map and empty elements. May fail
// (propagated via MaybeObject) if the fast-elements map must be allocated.
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1330
1331
// Oddball string/number representations (e.g. "undefined" / NaN).
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// Kind byte distinguishing the oddball singletons.
byte Oddball::kind() {
  return READ_BYTE_FIELD(this, kKindOffset);
}


void Oddball::set_kind(byte value) {
  WRITE_BYTE_FIELD(this, kKindOffset, value);
}
1344
1345
// The value stored in a global property cell.
Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1356
1357
// Size in bytes of the fixed header for this object's instance type; the
// internal fields and in-object properties follow the header.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_ARRAY_TYPE:
      // NOTE(review): returns JSValue::kSize for arrays and regexps —
      // presumably these header sizes coincide with JSArray/JSRegExp;
      // confirm against their kSize constants in objects.h.
      return JSValue::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
1388
1389
// Number of internal (embedder) fields: total size minus header, in
// pointers, minus the in-object properties that share the same region.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


// Byte offset of the internal field at |index| (fields start right after
// the header).
int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}
1403
1404
// Read the internal field at |index|.
Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


// Write the internal field at |index|, with a write barrier since the
// stored value may be a heap pointer.
void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1423
1424
1425// Access fast-case object properties at index. The use of these routines
1426// is needed to correctly distinguish between properties stored in-object and
1427// properties stored in the properties array.
1428Object* JSObject::FastPropertyAt(int index) {
1429  // Adjust for the number of properties stored in the object.
1430  index -= map()->inobject_properties();
1431  if (index < 0) {
1432    int offset = map()->instance_size() + (index * kPointerSize);
1433    return READ_FIELD(this, offset);
1434  } else {
1435    ASSERT(index < properties()->length());
1436    return properties()->get(index);
1437  }
1438}
1439
1440
1441Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1442  // Adjust for the number of properties stored in the object.
1443  index -= map()->inobject_properties();
1444  if (index < 0) {
1445    int offset = map()->instance_size() + (index * kPointerSize);
1446    WRITE_FIELD(this, offset, value);
1447    WRITE_BARRIER(this, offset);
1448  } else {
1449    ASSERT(index < properties()->length());
1450    properties()->set(index, value);
1451  }
1452  return value;
1453}
1454
1455
// Byte offset of an in-object property; the adjusted index must be
// negative, i.e. the property must actually be stored in the object.
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


// Read an in-object property (see GetInObjectPropertyOffset).
Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


// Write an in-object property; returns the stored value.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
  return value;
}
1484
1485
1486
// Fill every pointer slot after the header with |value|. The value must
// not be in new space so the missing write barrier is safe.
void JSObject::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1493
1494
// Fast-mode objects keep properties in a descriptor-indexed array; slow
// mode uses a dictionary backing store.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


// Upper bound on fast properties before normalizing to dictionary mode.
int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}
1508
1509
// Fill every pointer slot after the header with undefined. undefined is
// never in new space, so no write barrier is needed.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1516
1517
1518bool Object::ToArrayIndex(uint32_t* index) {
1519  if (IsSmi()) {
1520    int value = Smi::cast(this)->value();
1521    if (value < 0) return false;
1522    *index = value;
1523    return true;
1524  }
1525  if (IsHeapNumber()) {
1526    double value = HeapNumber::cast(this)->value();
1527    uint32_t uint_value = static_cast<uint32_t>(value);
1528    if (value == static_cast<double>(uint_value)) {
1529      *index = uint_value;
1530      return true;
1531    }
1532  }
1533  return false;
1534}
1535
1536
1537bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1538  if (!this->IsJSValue()) return false;
1539
1540  JSValue* js_value = JSValue::cast(this);
1541  if (!js_value->value()->IsString()) return false;
1542
1543  String* str = String::cast(js_value->value());
1544  if (index >= (uint32_t)str->length()) return false;
1545
1546  return true;
1547}
1548
1549
// Bounds-checked (debug only) element read.
Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
1554
1555
1556void FixedArray::set(int index, Smi* value) {
1557  ASSERT(map() != HEAP->fixed_cow_array_map());
1558  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1559  int offset = kHeaderSize + index * kPointerSize;
1560  WRITE_FIELD(this, offset, value);
1561}
1562
1563
// Store an element with an unconditional write barrier (the value may be
// a new-space heap pointer). Copy-on-write arrays must not be mutated.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1571
1572
// Objects in new space never need a write barrier; taking the
// AssertNoAllocation witness guarantees the object cannot be promoted
// while the returned mode is in use.
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
1577
1578
// Store an element with a caller-chosen write barrier mode (see
// GetWriteBarrierMode).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
}
1588
1589
// Barrier-free store, valid only because the value is asserted to be
// outside new space.
void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
1596
1597
// Store the undefined sentinel at |index| (current heap).
void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


// Heap-parameterized variant; undefined lives outside new space so no
// write barrier is required.
void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}


// Store the null sentinel at |index| (current heap).
void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


// Heap-parameterized variant; null lives outside new space so no write
// barrier is required.
void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
1622
1623
// Store the hole sentinel at |index|; the hole lives outside new space so
// no write barrier is required.
// NOTE(review): the asserts use the HEAP macro while the write uses
// GetHeap() — presumably equivalent here; verify for multi-isolate use.
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
1632
1633
// Store a smi without bounds or copy-on-write checks (smis need no
// barrier).
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// Unchecked store with a caller-chosen write barrier mode and explicit
// heap (usable when GetHeap() cannot be called, e.g. during GC).
void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
}
1649
1650
1651void FixedArray::set_null_unchecked(Heap* heap, int index) {
1652  ASSERT(index >= 0 && index < this->length());
1653  ASSERT(!HEAP->InNewSpace(heap->null_value()));
1654  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1655}
1656
1657
// Pointer to the first element slot (useful for bulk copies).
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
1661
1662
// An empty descriptor array is the canonical shared instance; any array
// with real descriptors is longer than kFirstIndex.
bool DescriptorArray::IsEmpty() {
  ASSERT(this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return length() <= kFirstIndex;
}


// Exchange two slots using barrier-free stores (see fast_set).
void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* tmp = array->get(first);
  fast_set(array, first, array->get(second));
  fast_set(array, second, tmp);
}
1675
1676
1677int DescriptorArray::Search(String* name) {
1678  SLOW_ASSERT(IsSortedNoDuplicates());
1679
1680  // Check for empty descriptor array.
1681  int nof = number_of_descriptors();
1682  if (nof == 0) return kNotFound;
1683
1684  // Fast case: do linear search for small arrays.
1685  const int kMaxElementsForLinearSearch = 8;
1686  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1687    return LinearSearch(name, nof);
1688  }
1689
1690  // Slow case: perform binary search.
1691  return BinarySearch(name, 0, nof - 1);
1692}
1693
1694
1695int DescriptorArray::SearchWithCache(String* name) {
1696  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1697  if (number == DescriptorLookupCache::kAbsent) {
1698    number = Search(name);
1699    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1700  }
1701  return number;
1702}
1703
1704
// The property key of descriptor |descriptor_number|.
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


// The value slot of the descriptor (stored in the content array).
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


// The smi-encoded PropertyDetails of the descriptor.
Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}


// The property type decoded from the details word.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}


// For FIELD descriptors: the field index encoded in the value.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}
1732
1733
// For CONSTANT_FUNCTION descriptors: the function stored as the value.
JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}


// For CALLBACKS descriptors: the raw callbacks object.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


// For CALLBACKS descriptors: the AccessorDescriptor wrapped in a Proxy.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Proxy* p = Proxy::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->proxy());
}


// Real properties have types below FIRST_PHANTOM_PROPERTY_TYPE;
// transitions and null descriptors are above it.
bool DescriptorArray::IsProperty(int descriptor_number) {
  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
}
1755
1756
1757bool DescriptorArray::IsTransition(int descriptor_number) {
1758  PropertyType t = GetType(descriptor_number);
1759  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
1760      t == EXTERNAL_ARRAY_TRANSITION;
1761}
1762
1763
// True for placeholder descriptors with no property.
bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


// DontEnum attribute decoded from the details word.
bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}


// Copy descriptor |descriptor_number| out into |desc|.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}
1779
1780
// Write |desc| into slot |descriptor_number|: key in this array, value and
// details in the content array. Uses barrier-free fast_set, which is only
// valid because the elements are asserted to be outside new space.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
  ASSERT(!HEAP->InNewSpace(desc->GetValue()));

  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
  FixedArray* content_array = GetContentArray();
  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
  fast_set(content_array, ToDetailsIndex(descriptor_number),
           desc->GetDetails().AsSmi());
}
1795
1796
// Copy the descriptor at |src_index| in |src| into slot |index| here.
void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc);
}
1802
1803
1804void DescriptorArray::Swap(int first, int second) {
1805  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
1806  FixedArray* content_array = GetContentArray();
1807  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
1808  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
1809}
1810
1811
// Convenience overload using the current isolate.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
    // null_value() presumably marks deleted entries: skip but keep probing.
    if (element != isolate->heap()->null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
1834
1835
// The max-number-key slot doubles as a flag word: its low bit (the
// requires-slow-elements mask) forces dictionary elements.
bool NumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

// Largest numeric key seen, shifted past the flag bits. Only meaningful
// when slow elements are not (yet) required.
uint32_t NumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

// Permanently mark this dictionary as requiring slow elements.
void NumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
1854
1855
1856// ------------------------------------
1857// Cast operations
1858
1859
1860CAST_ACCESSOR(FixedArray)
1861CAST_ACCESSOR(DescriptorArray)
1862CAST_ACCESSOR(DeoptimizationInputData)
1863CAST_ACCESSOR(DeoptimizationOutputData)
1864CAST_ACCESSOR(SymbolTable)
1865CAST_ACCESSOR(JSFunctionResultCache)
1866CAST_ACCESSOR(NormalizedMapCache)
1867CAST_ACCESSOR(CompilationCacheTable)
1868CAST_ACCESSOR(CodeCacheHashTable)
1869CAST_ACCESSOR(MapCache)
1870CAST_ACCESSOR(String)
1871CAST_ACCESSOR(SeqString)
1872CAST_ACCESSOR(SeqAsciiString)
1873CAST_ACCESSOR(SeqTwoByteString)
1874CAST_ACCESSOR(ConsString)
1875CAST_ACCESSOR(ExternalString)
1876CAST_ACCESSOR(ExternalAsciiString)
1877CAST_ACCESSOR(ExternalTwoByteString)
1878CAST_ACCESSOR(JSObject)
1879CAST_ACCESSOR(Smi)
1880CAST_ACCESSOR(HeapObject)
1881CAST_ACCESSOR(HeapNumber)
1882CAST_ACCESSOR(Oddball)
1883CAST_ACCESSOR(JSGlobalPropertyCell)
1884CAST_ACCESSOR(SharedFunctionInfo)
1885CAST_ACCESSOR(Map)
1886CAST_ACCESSOR(JSFunction)
1887CAST_ACCESSOR(GlobalObject)
1888CAST_ACCESSOR(JSGlobalProxy)
1889CAST_ACCESSOR(JSGlobalObject)
1890CAST_ACCESSOR(JSBuiltinsObject)
1891CAST_ACCESSOR(Code)
1892CAST_ACCESSOR(JSArray)
1893CAST_ACCESSOR(JSRegExp)
1894CAST_ACCESSOR(Proxy)
1895CAST_ACCESSOR(ByteArray)
1896CAST_ACCESSOR(ExternalArray)
1897CAST_ACCESSOR(ExternalByteArray)
1898CAST_ACCESSOR(ExternalUnsignedByteArray)
1899CAST_ACCESSOR(ExternalShortArray)
1900CAST_ACCESSOR(ExternalUnsignedShortArray)
1901CAST_ACCESSOR(ExternalIntArray)
1902CAST_ACCESSOR(ExternalUnsignedIntArray)
1903CAST_ACCESSOR(ExternalFloatArray)
1904CAST_ACCESSOR(ExternalPixelArray)
1905CAST_ACCESSOR(Struct)
1906
1907
1908#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
1909  STRUCT_LIST(MAKE_STRUCT_CAST)
1910#undef MAKE_STRUCT_CAST
1911
1912
// Checked downcast to a HashTable specialization (debug-asserted only).
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
1918
1919
// Length fields: smi-encoded for heap arrays and strings, plain int for
// external arrays.
SMI_ACCESSORS(FixedArray, length, kLengthOffset)
SMI_ACCESSORS(ByteArray, length, kLengthOffset)

INT_ACCESSORS(ExternalArray, length, kLengthOffset)


SMI_ACCESSORS(String, length, kLengthOffset)
1927
1928
// Raw hash field (hash plus flag bits).
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // Zero the upper half of the 64-bit slot so the whole word is defined.
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
1940
1941
// Content equality. Symbols are unique, so two distinct symbols can never
// be equal; everything else falls back to the slow comparison.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}
1949
1950
// Attempt to produce a flat version of this string. Non-cons strings and
// already-flattened cons strings return without allocating; otherwise the
// slow path may allocate (and can therefore fail).
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  // A flattened cons string has an empty second part.
  if (cons->second()->length() == 0) return cons->first();
  return SlowTryFlatten(pretenure);
}


// Like TryFlatten, but on allocation failure returns the original string
// instead of propagating the failure.
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (flat->ToObject(&successfully_flattened)) {
    return String::cast(successfully_flattened);
  }
  return this;
}
1967
1968
// Read the character at |index|, dispatching on the string's full
// representation (sequential / cons / external, ascii / two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
1990
1991
// Write the character at |index|. Only sequential strings are mutable in
// place.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsAsciiRepresentation()
      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}
2000
2001
2002bool String::IsFlat() {
2003  switch (StringShape(this).representation_tag()) {
2004    case kConsStringTag: {
2005      String* second = ConsString::cast(this)->second();
2006      // Only flattened strings have second part empty.
2007      return second->length() == 0;
2008    }
2009    default:
2010      return true;
2011  }
2012}
2013
2014
// Read one ascii character (widened to uint16_t).
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Write one character; the value must fit in the ascii range.
void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (characters follow the header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}
2036
2037
// Address of the first character (characters follow the header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


// Read one two-byte character.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


// Write one two-byte character.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size in bytes, determined solely by the character count.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


// Object size in bytes, determined solely by the character count.
int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
2068
2069
// --- ConsString accessors ---
// first/second are the two halves of a cons string.  The unchecked_*
// variants skip the String::cast check; presumably for contexts (such as
// GC) where a checked cast is not safe -- confirm against callers.

String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
}
2100
2101
// --- External string resource accessors ---
// The resource is stored as a raw (non-tagged) pointer, so it is read and
// written directly with no write barrier.

ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalAsciiString::set_resource(
    ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}


ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::set_resource(
    ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
2122
2123
// --- JSFunctionResultCache bookkeeping ---
// The cache is a fixed array whose first slots hold the size and finger
// indices; actual entries start at kEntriesIndex.

void JSFunctionResultCache::MakeZeroSize() {
  // Resetting both indices back to kEntriesIndex marks the cache as empty.
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  // Overwrite all used entry slots with the hole before resetting the
  // size, so no stale results are retained.
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


// Current size in elements, including the bookkeeping slots.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


// Index of the "finger" used to scan the cache.
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
2158
2159
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Reads a whole int at int-index |index| (byte offset index * kIntSize).
// NOTE(review): the assert only checks that the int's first byte is in
// range, not the full kIntSize span -- confirm callers never read a
// partially-out-of-bounds int.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Converts the address of the first data byte back into the tagged
// ByteArray pointer (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
2187
2188
// --- External (off-heap) array backing store access ---

uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalPixelArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}


void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}


// The backing store pointer is stored as a raw intptr_t field, not a
// tagged heap reference.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// NOTE(review): |mode| is unused here -- the stored value is a raw
// pointer, so no write barrier is emitted; presumably the parameter
// exists for accessor-signature uniformity.  Confirm at call sites.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
2218
2219
// --- Typed element access for the external array variants ---
// Each pair below indexes the off-heap backing store directly, cast to
// the element type of the array.  Bounds are checked in debug mode only.

int8_t ExternalByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUnsignedByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUnsignedShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}


int32_t ExternalIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUnsignedIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloatArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
2316
2317
// --- Map byte-field getters ---

// Id of the GC visitor used for objects with this map (stored in a byte).
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  // Must fit in the single byte at kVisitorIdOffset.
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes.  It is stored in the map in words (see
// set_instance_size), hence the shift back up here.
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
2342
2343
// Computes this object's size in bytes given its map.  Fixed-size
// instances read the size straight from the map; variable-sized ones
// (marked with kVariableSizeSentinel) are dispatched on instance type.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // We can ignore the "symbol" bit because it is only set for symbols
  // and implies a string type.
  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
  // Only inline the most frequent cases.
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE) {
    return SeqAsciiString::SizeFor(
        reinterpret_cast<SeqAsciiString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  // Any other variable-sized object reaching this inline path must be code.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
2368
2369
void Map::set_instance_size(int value) {
  // The size must be pointer-aligned; it is stored in words so that it
  // fits in a single byte.
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  // Stored in a single byte.
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  // Stored in a single byte.
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Clamped to 255 so it fits in the byte field.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


// bit_field and bit_field2 hold the packed boolean flags manipulated by
// the accessors that follow.

byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
2430
2431
2432void Map::set_non_instance_prototype(bool value) {
2433  if (value) {
2434    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2435  } else {
2436    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2437  }
2438}
2439
2440
// --- Packed boolean flags in bit_field / bit_field2 ---
// Each setter ORs the flag's bit in or ANDs it out; each getter tests it.

bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
  }
}


bool Map::function_with_prototype() {
  return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}


void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsShared));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsShared));
  }
}

bool Map::is_shared() {
  return ((1 << kIsShared) & bit_field2()) != 0;
}
2511
2512
// Reads the constructor slot without a checked cast -- presumably for
// contexts (such as GC) where JSFunction::cast is not safe; confirm
// against callers.
JSFunction* Map::unchecked_constructor() {
  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}
2516
2517
// The flags word packs kind, IC state, type, extra IC state, argument
// count and cache-holder into one int (see ComputeFlags below).
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Code::set_flags(Code::Flags flags) {
  // Every kind value must fit in the kind bit field.
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= (kFlagsKindMask >> kFlagsKindShift)+1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
2531
2532
// Convenience getters that decode single components out of flags().

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InLoopFlag Code::ic_in_loop() {
  return ExtractICInLoopFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}


// Only meaningful for IC stubs (enforced by the assert).
Code::ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub());
  return ExtractExtraICStateFromFlags(flags());
}


// Only meaningful for monomorphic ICs (enforced by the assert).
PropertyType Code::type() {
  ASSERT(ic_state() == MONOMORPHIC);
  return ExtractTypeFromFlags(flags());
}


int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}
2572
2573
// major_key identifies the code stub that produced this object.  It is a
// single byte and only valid for the stub-like kinds listed in the assert.
int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == BINARY_OP_IC ||
         kind() == TYPE_RECORDING_BINARY_OP_IC ||
         kind() == COMPARE_IC);
  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == BINARY_OP_IC ||
         kind() == TYPE_RECORDING_BINARY_OP_IC ||
         kind() == COMPARE_IC);
  // The key is stored in a single byte.
  ASSERT(0 <= major && major < 256);
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}


// The following byte fields are only valid for FUNCTION code, as asserted.

bool Code::optimizable() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


bool Code::has_deoptimization_support() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kHasDeoptimizationSupportOffset) == 1;
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kHasDeoptimizationSupportOffset, value ? 1 : 0);
}


int Code::allow_osr_at_loop_nesting_level() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT(kind() == FUNCTION);
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}
2628
2629
// Stack-slot count and safepoint table offset are only valid for
// optimized code; the stack-check table offset only for full-codegen
// (FUNCTION) code.  The asserts enforce this.

unsigned Code::stack_slots() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kStackSlotsOffset);
}


void Code::set_stack_slots(unsigned slots) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
}


unsigned Code::safepoint_table_offset() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
}


void Code::set_safepoint_table_offset(unsigned offset) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  // The table must start at an int-aligned offset within the code.
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}


unsigned Code::stack_check_table_offset() {
  ASSERT(kind() == FUNCTION);
  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}


void Code::set_stack_check_table_offset(unsigned offset) {
  ASSERT(kind() == FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
2666
2667
// --- Kind-specific byte fields ---
// Each accessor below asserts the code kind it is valid for before
// touching its byte slot.

CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}


ExternalArrayType Code::external_array_type() {
  ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
  byte type = READ_BYTE_FIELD(this, kExternalArrayTypeOffset);
  return static_cast<ExternalArrayType>(type);
}


void Code::set_external_array_type(ExternalArrayType value) {
  ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
  WRITE_BYTE_FIELD(this, kExternalArrayTypeOffset, value);
}


// Note: the classic and type-recording binary op stubs share the same
// byte slot (kBinaryOpTypeOffset); the kind asserts keep the two uses
// disjoint.

byte Code::binary_op_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_binary_op_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


byte Code::type_recording_binary_op_type() {
  ASSERT(is_type_recording_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_type_recording_binary_op_type(byte value) {
  ASSERT(is_type_recording_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


byte Code::type_recording_binary_op_result_type() {
  ASSERT(is_type_recording_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
}


void Code::set_type_recording_binary_op_result_type(byte value) {
  ASSERT(is_type_recording_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
}


byte Code::compare_state() {
  ASSERT(is_compare_ic_stub());
  return READ_BYTE_FIELD(this, kCompareStateOffset);
}


void Code::set_compare_state(byte value) {
  ASSERT(is_compare_ic_stub());
  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}


// True for any of the inline cache kinds (the contiguous range
// [FIRST_IC_KIND, LAST_IC_KIND]).
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
}
2746
2747
// Packs all flag components into a single Flags word.  Each component is
// shifted into its own bit field; the trailing asserts verify the packing
// round-trips through the Extract* helpers.
Code::Flags Code::ComputeFlags(Kind kind,
                               InLoopFlag in_loop,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for monomorphic call IC stubs
  // or for store IC stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         (kind == CALL_IC && (ic_state == MONOMORPHIC ||
                              ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
         (kind == STORE_IC) ||
         (kind == KEYED_STORE_IC));
  // Compute the bit mask.
  int bits = kind << kFlagsKindShift;
  if (in_loop) bits |= kFlagsICInLoopMask;
  bits |= ic_state << kFlagsICStateShift;
  bits |= type << kFlagsTypeShift;
  bits |= extra_ic_state << kFlagsExtraICStateShift;
  bits |= argc << kFlagsArgumentsCountShift;
  if (holder == PROTOTYPE_MAP) bits |= kFlagsCacheInPrototypeMapMask;
  // Cast to flags and validate result before returning it.
  Flags result = static_cast<Flags>(bits);
  ASSERT(ExtractKindFromFlags(result) == kind);
  ASSERT(ExtractICStateFromFlags(result) == ic_state);
  ASSERT(ExtractICInLoopFromFlags(result) == in_loop);
  ASSERT(ExtractTypeFromFlags(result) == type);
  ASSERT(ExtractExtraICStateFromFlags(result) == extra_ic_state);
  ASSERT(ExtractArgumentsCountFromFlags(result) == argc);
  return result;
}
2780
2781
// Convenience wrapper over ComputeFlags with the IC state fixed to
// MONOMORPHIC; all other components are forwarded unchanged.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          InLoopFlag in_loop,
                                          int argc) {
  return ComputeFlags(
      kind, in_loop, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
2791
2792
// Extract* helpers: each masks out one bit field of a packed flags word
// and shifts it back down (inverse of the packing in ComputeFlags).

Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  int bits = (flags & kFlagsKindMask) >> kFlagsKindShift;
  return static_cast<Kind>(bits);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  int bits = (flags & kFlagsICStateMask) >> kFlagsICStateShift;
  return static_cast<InlineCacheState>(bits);
}


Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  int bits = (flags & kFlagsExtraICStateMask) >> kFlagsExtraICStateShift;
  return static_cast<ExtraICState>(bits);
}
2809
2810
2811InLoopFlag Code::ExtractICInLoopFromFlags(Flags flags) {
2812  int bits = (flags & kFlagsICInLoopMask);
2813  return bits != 0 ? IN_LOOP : NOT_IN_LOOP;
2814}
2815
2816
PropertyType Code::ExtractTypeFromFlags(Flags flags) {
  int bits = (flags & kFlagsTypeMask) >> kFlagsTypeShift;
  return static_cast<PropertyType>(bits);
}


int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kFlagsArgumentsCountMask) >> kFlagsArgumentsCountShift;
}
2826
2827
2828InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
2829  int bits = (flags & kFlagsCacheInPrototypeMapMask);
2830  return bits != 0 ? PROTOTYPE_MAP : OWN_MAP;
2831}
2832
2833
// Clears the PropertyType component of a flags word, leaving all other
// components intact.
Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~kFlagsTypeMask;
  return static_cast<Flags>(bits);
}
2838
2839
// Recovers the Code object from the address of its first instruction
// (the instructions start kHeaderSize bytes after the object start).
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
2849
2850
Isolate* Map::isolate() {
  return heap()->isolate();
}


// The owning heap is recovered from the page this map resides on.
Heap* Map::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}
2863
2864
// Reads a code entry address stored at |location_of_address| and maps it
// back to the owning heap object (the object header precedes the entry
// point by Code::kHeaderSize).
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
2869
2870
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  // A prototype is always either null or a JSObject.
  ASSERT(value->IsNull() || value->IsJSObject());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
}
2881
2882
// Returns a map equivalent to this one but with fast elements, reusing
// |this| if it already qualifies.  Copying the map can fail under memory
// pressure, hence the MaybeObject return and the early-out on failure.
MaybeObject* Map::GetFastElementsMap() {
  if (has_fast_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_has_fast_elements(true);
  isolate()->counters()->map_slow_to_fast_elements()->Increment();
  return new_map;
}


// Mirror image of GetFastElementsMap: produce a map with slow elements.
MaybeObject* Map::GetSlowElementsMap() {
  if (!has_fast_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_has_fast_elements(false);
  isolate()->counters()->map_fast_to_slow_elements()->Increment();
  return new_map;
}
2907
2908
// Field accessor definitions generated by the ACCESSORS* macros: each
// invocation defines a typed getter and a setter (with write barrier)
// for the named field at the given offset on the given class.

ACCESSORS(Map, instance_descriptors, DescriptorArray,
          kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
                 kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
          kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Proxy, kWrapperOffset)
ACCESSORS(Script, type, Smi, kTypeOffset)
ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS(Script, eval_from_instructions_offset, Smi,
          kEvalFrominstructionsOffsetOffset)
2992
#ifdef ENABLE_DEBUGGER_SUPPORT
// Debugger bookkeeping accessors; compiled in only with debugger support.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
3004
// SharedFunctionInfo field accessors.  BOOL_ACCESSORS/BOOL_GETTER define
// boolean views onto single bits of the named Smi-backed flags field.

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
          kThisPropertyAssignmentsOffset)

BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo, compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               try_full_codegen,
               kTryFullCodegen)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
3037
3038
3039#if V8_HOST_ARCH_32_BIT
3040SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3041SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3042              kFormalParameterCountOffset)
3043SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3044              kExpectedNofPropertiesOffset)
3045SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3046SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3047              kStartPositionAndTypeOffset)
3048SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3049SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3050              kFunctionTokenPositionOffset)
3051SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3052              kCompilerHintsOffset)
3053SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3054              kThisPropertyAssignmentsCountOffset)
3055SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3056#else
3057
3058#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
3059  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
3060  int holder::name() {                                            \
3061    int value = READ_INT_FIELD(this, offset);                     \
3062    ASSERT(kHeapObjectTag == 1);                                  \
3063    ASSERT((value & kHeapObjectTag) == 0);                        \
3064    return value >> 1;                                            \
3065  }                                                               \
3066  void holder::set_##name(int value) {                            \
3067    ASSERT(kHeapObjectTag == 1);                                  \
3068    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
3069           (value & 0xC0000000) == 0x000000000);                  \
3070    WRITE_INT_FIELD(this,                                         \
3071                    offset,                                       \
3072                    (value << 1) & ~kHeapObjectTag);              \
3073  }
3074
3075#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
3076  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
3077  INT_ACCESSORS(holder, name, offset)
3078
3079
3080PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3081PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3082                        formal_parameter_count,
3083                        kFormalParameterCountOffset)
3084
3085PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3086                        expected_nof_properties,
3087                        kExpectedNofPropertiesOffset)
3088PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3089
3090PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3091PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3092                        start_position_and_type,
3093                        kStartPositionAndTypeOffset)
3094
3095PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3096                        function_token_position,
3097                        kFunctionTokenPositionOffset)
3098PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3099                        compiler_hints,
3100                        kCompilerHintsOffset)
3101
3102PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3103                        this_property_assignments_count,
3104                        kThisPropertyAssignmentsCountOffset)
3105PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3106#endif
3107
3108
// Construction counter stored as a raw (untagged) byte field; see
// IsInobjectSlackTrackingInProgress for the related tracking state.
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


void SharedFunctionInfo::set_construction_count(int value) {
  // The counter occupies a single byte, so the value must fit in [0, 255].
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}
3118
3119
3120bool SharedFunctionInfo::live_objects_may_exist() {
3121  return (compiler_hints() & (1 << kLiveObjectsMayExist)) != 0;
3122}
3123
3124
3125void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
3126  if (value) {
3127    set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
3128  } else {
3129    set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
3130  }
3131}
3132
3133
// Slack tracking is in progress as long as an initial map is installed
// (the field is reset to undefined when tracking finishes).
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != HEAP->undefined_value();
}


bool SharedFunctionInfo::optimization_disabled() {
  return BooleanBit::get(compiler_hints(), kOptimizationDisabled);
}


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


// Strict-mode flag, stored as a boolean bit in the compiler hints.
bool SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction);
}


void SharedFunctionInfo::set_strict_mode(bool value) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kStrictModeFunction,
                                     value));
}
3166
3167
// Code cache backing stores: a fixed array for the default cache and a
// separate object for the normal-type cache.
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3170
3171bool Script::HasValidSource() {
3172  Object* src = this->source();
3173  if (!src->IsString()) return true;
3174  String* src_str = String::cast(src);
3175  if (!StringShape(src_str).IsExternal()) return true;
3176  if (src_str->IsAsciiRepresentation()) {
3177    return ExternalAsciiString::cast(src)->resource() != NULL;
3178  } else if (src_str->IsTwoByteRepresentation()) {
3179    return ExternalTwoByteString::cast(src)->resource() != NULL;
3180  }
3181  return true;
3182}
3183
3184
// Marks a builtin with the sentinel parameter count so callers never
// build an arguments adaptor frame for it (see NeedsArgumentsAdaption).
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// The start position shares a field with the type bits; shift them out.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  // Preserve the non-position bits of the combined field.
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}


Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Like code(), but without the checked cast.
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  // No write barrier is emitted (mode is unused); the assert checks the
  // code object is not in new space, which is what makes that safe.
  WRITE_FIELD(this, kCodeOffset, value);
  ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
}


SerializedScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<SerializedScopeInfo*>(
      READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
}


// The deopt counter is kept as a Smi, so no write barrier is needed.
Smi* SharedFunctionInfo::deopt_counter() {
  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
}


void SharedFunctionInfo::set_deopt_counter(Smi* value) {
  WRITE_FIELD(this, kDeoptCounterOffset, value);
}


// Compiled means the code is no longer the lazy-compile stub.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}


// API functions carry a FunctionTemplateInfo in the function-data slot.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


// Builtin functions store their id as a Smi in the function-data slot.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
3266}
3267
3268
3269int SharedFunctionInfo::code_age() {
3270  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3271}
3272
3273
3274void SharedFunctionInfo::set_code_age(int code_age) {
3275  set_compiler_hints(compiler_hints() |
3276                     ((code_age & kCodeAgeMask) << kCodeAgeShift));
3277}
3278
3279
// Deoptimization support is only queried on full-codegen
// (Code::FUNCTION) code objects.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


// A function is a builtin when its context's global object is the
// builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global()->IsJSBuiltinsObject();
}


// Functions marked with the sentinel (see DontAdaptArguments) are called
// without an arguments adaptor frame.
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}


Code* JSFunction::code() {
  return Code::cast(unchecked_code());
}


// The function stores the code entry address rather than the code
// object; recover the object from the entry address.
Code* JSFunction::unchecked_code() {
  return reinterpret_cast<Code*>(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  // Skip the write barrier because code is never in new space.
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
3324
3325
3326void JSFunction::ReplaceCode(Code* code) {
3327  bool was_optimized = IsOptimized();
3328  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3329
3330  set_code(code);
3331
3332  // Add/remove the function from the list of optimized functions for this
3333  // context based on the state change.
3334  if (!was_optimized && is_optimized) {
3335    context()->global_context()->AddOptimizedFunction(this);
3336  }
3337  if (was_optimized && !is_optimized) {
3338    context()->global_context()->RemoveOptimizedFunction(this);
3339  }
3340}
3341
3342
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Unchecked variants skip the type-checking casts.
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(this, kContextOffset);
}

// This slot holds either the function's initial map or, before one
// exists, its prototype object (see instance_prototype below).
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// Either an initial map exists (its prototype is the instance
// prototype) or the slot holds the prototype directly; the hole means
// no instance prototype has been set up yet.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// Compiled means the code is no longer the lazy-compile stub.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


int JSFunction::NumberOfLiterals() {
  return literals()->length();
}
3424
3425
// The builtins object stores the JavaScript builtin functions and their
// code objects at fixed per-id offsets.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  // No write barrier: the assert checks the code is not in new space.
  ASSERT(!HEAP->InNewSpace(value));
}
3452
3453
// A Proxy wraps a raw address, stored as an intptr so the GC does not
// treat it as a tagged heap pointer.
Address Proxy::proxy() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset));
}


void Proxy::set_proxy(Address value) {
  WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value));
}


ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  // JSValue instances have a fixed size; guard against mis-tagged casts.
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
3472
3473
// Error/message object fields: message type, arguments, originating
// script, captured stack data, and source position range.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  // Message objects have a fixed size; guard against mis-tagged casts.
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
3488
3489
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)


// The instruction stream starts immediately after the object header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction size rounded up to the heap object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Unchecked variants skip the type-checking casts.
FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* pc) {
  // Note the inclusive upper bound: a pc equal to instruction_end()
  // still counts as inside this code object.
  return (instruction_start() <= pc) &&
      (pc <= instruction_start() + instruction_size());
}
3540
3541
// The array length is an Object (may be a Smi or a heap number).
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The regexp data field is undefined until compiled, then a FixedArray.
ACCESSORS(JSRegExp, data, Object, kDataOffset)
3546
3547
3548JSRegExp::Type JSRegExp::TypeTag() {
3549  Object* data = this->data();
3550  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
3551  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
3552  return static_cast<JSRegExp::Type>(smi->value());
3553}
3554
3555
// Number of capture groups; atom regexps have none, irregexp regexps
// store the count in their data array.
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


// Raw access to the data array; only valid once the regexp is compiled.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
3596
3597
// Derives the elements kind from the map's fast-elements bit and the
// type of the elements backing store.
JSObject::ElementsKind JSObject::GetElementsKind() {
  if (map()->has_fast_elements()) {
    // Fast elements use a plain (possibly copy-on-write) FixedArray.
    ASSERT(elements()->map() == GetHeap()->fixed_array_map() ||
           elements()->map() == GetHeap()->fixed_cow_array_map());
    return FAST_ELEMENTS;
  }
  HeapObject* array = elements();
  if (array->IsFixedArray()) {
    // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a
    // FixedArray, but FAST_ELEMENTS is already handled above.
    ASSERT(array->IsDictionary());
    return DICTIONARY_ELEMENTS;
  }
  ASSERT(!map()->has_fast_elements());
  if (array->IsExternalArray()) {
    // Map the external array's instance type to its elements kind.
    switch (array->map()->instance_type()) {
      case EXTERNAL_BYTE_ARRAY_TYPE:
        return EXTERNAL_BYTE_ELEMENTS;
      case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
      case EXTERNAL_SHORT_ARRAY_TYPE:
        return EXTERNAL_SHORT_ELEMENTS;
      case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_SHORT_ELEMENTS;
      case EXTERNAL_INT_ARRAY_TYPE:
        return EXTERNAL_INT_ELEMENTS;
      case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_INT_ELEMENTS;
      case EXTERNAL_PIXEL_ARRAY_TYPE:
        return EXTERNAL_PIXEL_ELEMENTS;
      default:
        break;
    }
  }
  // The only remaining possibility is an external float array.
  ASSERT(array->map()->instance_type() == EXTERNAL_FLOAT_ARRAY_TYPE);
  return EXTERNAL_FLOAT_ELEMENTS;
}
3635
3636
bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}


// Generates a HasExternal<name>Elements() predicate that compares the
// elements backing store's instance type against the given type.
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// Length is only settable when the backing store is a FixedArray
// (i.e. the elements are not an external array).
bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
3692
3693
// Ensures the fast elements backing store is writable: a copy-on-write
// array is replaced with a fresh copy using the plain fixed-array map.
// Returns the (possibly new) elements array, or the allocation failure.
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Already writable if the map is not the copy-on-write map.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
3710
3711
// Slow-mode properties are stored in a string dictionary.
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


// Dictionary-mode elements are stored in a number dictionary.
NumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return NumberDictionary::cast(elements());
}


// The hash field caches the computed hash; a set "not computed" bit
// means the cache is empty.
bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
3741
3742
// Incremental string hasher; also tracks whether the characters seen so
// far form a valid array index.  Only strings short enough to be array
// indices start with index tracking enabled.
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }


// Strings longer than the calculation limit get a trivial hash and skip
// per-character mixing entirely.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        // A leading zero disqualifies multi-digit indices ("0" is fine).
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      // Overflow guard: 429496729 is 2^32 / 10; the (d + 2) >> 3 term
      // tightens the bound by one for digits 6..9 so that
      // array_index_ * 10 + d cannot wrap around 32 bits.
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}


// Cheaper variant used once index tracking has been ruled out.
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}


uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    result = 27;
  }
  return result;
}
3806
3807
3808template <typename schar>
3809uint32_t HashSequentialString(const schar* chars, int length) {
3810  StringHasher hasher(length);
3811  if (!hasher.has_trivial_hash()) {
3812    int i;
3813    for (i = 0; hasher.is_array_index() && (i < length); i++) {
3814      hasher.AddCharacter(chars[i]);
3815    }
3816    for (; i < length; i++) {
3817      hasher.AddCharacterNoIndex(chars[i]);
3818    }
3819  }
3820  return hasher.GetHashField();
3821}
3822
3823
// Fast negative check via the cached hash field; falls back to the slow
// parse when the hash is not computed or the string may be an index.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


Object* JSObject::GetPrototype() {
  return JSObject::cast(this)->map()->prototype();
}


PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}

// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
// Returns the proxied global object, undefined for a detached proxy
// (null prototype), or the receiver itself when it is not a global proxy.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


// Hidden properties are stored under the isolate's hidden symbol.
bool JSObject::HasHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  return GetPropertyAttributePostInterceptor(this,
                                             GetHeap()->hidden_symbol(),
                                             false) != ABSENT;
}


Object* JSObject::GetHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  PropertyAttributes attributes;
  // You can't install a getter on a property indexed by the hidden symbol,
  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
  // object.
  Object* result =
      GetLocalPropertyPostInterceptor(this,
                                      GetHeap()->hidden_symbol(),
                                      &attributes)->ToObjectUnchecked();
  return result;
}


// Stores the hidden-properties object as a non-enumerable property
// under the hidden symbol.
MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
  ASSERT(!IsJSGlobalProxy());
  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
                                    hidden_obj,
                                    DONT_ENUM,
                                    kNonStrictMode);
}


bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}
3889
3890
// AccessorInfo packs several boolean bits plus the property attributes
// into its Smi flag field.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  ASSERT(AttributesField::is_valid(attributes));
  // Replace only the attribute bits, preserving the boolean flag bits.
  int rest_value = flag()->value() & ~AttributesField::mask();
  set_flag(Smi::FromInt(rest_value | AttributesField::encode(attributes)));
}
3931
// Writes one dictionary entry.  Each entry occupies three consecutive
// slots in the underlying FixedArray: key, value, and details (a Smi).
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  // String-keyed entries must carry an enumeration index unless deleted.
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  // Details are a Smi, so fast_set skips the write barrier.
  FixedArray::fast_set(this, index+2, details.AsSmi());
}
3945
3946
// Shape policy for number-keyed dictionaries: keys are uint32 values
// compared against the stored key's numeric value.
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}


uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
}


// Converting a numeric key to a heap object may allocate, hence the
// MaybeObject return.
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}


// Shape policy for string-keyed dictionaries.
bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}


uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}


uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}


// Strings are already heap objects; no allocation needed.
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
3990
3991
// Resets the map's code cache to the canonical empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
3999
4000
4001void JSArray::EnsureSize(int required_size) {
4002  ASSERT(HasFastElements());
4003  FixedArray* elts = FixedArray::cast(elements());
4004  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4005  if (elts->length() < required_size) {
4006    // Doubling in size would be overkill, but leave some slack to avoid
4007    // constantly growing.
4008    Expand(required_size + (required_size >> 3));
4009    // It's a performance benefit to keep a frequently used array in new-space.
4010  } else if (!GetHeap()->new_space()->Contains(elts) &&
4011             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4012    // Expand will allocate a new backing store in new space even if the size
4013    // we asked for isn't larger than what we had before.
4014    Expand(required_size);
4015  }
4016}
4017
4018
// Smi overload: a smi length is not a heap pointer, so the write
// barrier can be skipped safely.
void JSArray::set_length(Smi* length) {
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
4022
4023
// Replaces the array's backing store and sets its length to match.
// NOTE(review): length is written before elements — presumably so the
// length never exceeds the installed store; keep this order.
void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}
4028
4029
4030MaybeObject* FixedArray::Copy() {
4031  if (length() == 0) return this;
4032  return GetHeap()->CopyFixedArray(this);
4033}
4034
4035
// Pushes this object onto the isolate's stack of relocatable objects.
// Relocatables form an intrusive singly-linked list via prev_; the order
// of the two writes below maintains the list invariant.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
4042
4043
// Pops this object off the isolate's relocatable stack.  Destruction must
// happen in strict LIFO order, which the ASSERT_EQ enforces.
Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
4049
4050
// A JSObject's body size is fully determined by its map's instance size;
// the object itself is not consulted.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
4054
4055
// Lets the visitor see the external address stored at kProxyOffset.
void Proxy::ProxyIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
4060
4061
// Static-visitor variant of ProxyIterateBody: same field, but dispatched
// at compile time so the call can be inlined.
template<typename StaticVisitor>
void Proxy::ProxyIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
4067
4068
// Lets the visitor see (and possibly update) the pointer to the external
// ASCII resource stored at kResourceOffset.
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4074
4075
// Static-visitor variant: same resource slot, compile-time dispatch.
template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4082
4083
// Lets the visitor see (and possibly update) the pointer to the external
// two-byte resource stored at kResourceOffset.
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4089
4090
// Static-visitor variant: same resource slot, compile-time dispatch.
template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4097
// Address of the pointer slot at byte offset |offset| inside |obj|.
// |offset| is parenthesized so expression arguments expand with the
// intended precedence.
#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + (offset))
4100
4101template<int start_offset, int end_offset, int size>
4102void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
4103    HeapObject* obj,
4104    ObjectVisitor* v) {
4105    v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
4106}
4107
4108
// Visits the pointer fields from start_offset up to the object's actual
// size, which varies per instance and is supplied by the caller.
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}
4115
4116#undef SLOT_ADDR
4117
4118
4119#undef CAST_ACCESSOR
4120#undef INT_ACCESSORS
4121#undef SMI_ACCESSORS
4122#undef ACCESSORS
4123#undef FIELD_ADDR
4124#undef READ_FIELD
4125#undef WRITE_FIELD
4126#undef WRITE_BARRIER
4127#undef CONDITIONAL_WRITE_BARRIER
4128#undef READ_MEMADDR_FIELD
4129#undef WRITE_MEMADDR_FIELD
4130#undef READ_DOUBLE_FIELD
4131#undef WRITE_DOUBLE_FIELD
4132#undef READ_INT_FIELD
4133#undef WRITE_INT_FIELD
4134#undef READ_SHORT_FIELD
4135#undef WRITE_SHORT_FIELD
4136#undef READ_BYTE_FIELD
4137#undef WRITE_BYTE_FIELD
4138
4139
4140} }  // namespace v8::internal
4141
4142#endif  // V8_OBJECTS_INL_H_
4143