objects-inl.h revision 85b71799222b55eb5dd74ea26efe0c64ab655c8c
1// Copyright 2011 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27//
28// Review notes:
29//
30// - The use of macros in these inline functions may seem superfluous
31// but it is absolutely needed to make sure gcc generates optimal
32// code. gcc is not happy when attempting to inline too deep.
33//
34
35#ifndef V8_OBJECTS_INL_H_
36#define V8_OBJECTS_INL_H_
37
38#include "elements.h"
39#include "objects.h"
40#include "contexts.h"
41#include "conversions-inl.h"
42#include "heap.h"
43#include "isolate.h"
44#include "property.h"
45#include "spaces.h"
46#include "v8memory.h"
47
48namespace v8 {
49namespace internal {
50
// Conversion between PropertyDetails and its Smi-encoded form.  The bit
// layout of value_ is defined by the *Field bit-field classes in
// property.h.

// Reconstruct PropertyDetails from a Smi produced by AsSmi().
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


// Encode these details as a Smi so they can be stored in a descriptor
// array or dictionary entry.
Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


// Return a copy of these details with the deleted bit set; used by
// dictionaries to mark a property slot as deleted.
PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
65
66
// Defines type::cast(Object*): asserts the type check in debug builds,
// then reinterpret_casts (no runtime cost in release builds).
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


// Getter/setter pair for a raw int field at a fixed byte offset.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Getter/setter pair for a tagged-pointer field.  The setter performs
// the write barrier required by the generational GC unless the caller
// passes SKIP_WRITE_BARRIER.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);           \
  }


// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
#define ACCESSORS_GCSAFE(holder, name, type, offset)                    \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode);                \
  }


// Getter/setter pair for an int stored as a Smi.  No write barrier is
// needed because a Smi is not a heap pointer.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
104
105
// Read-only accessor for a boolean bit at |offset| inside the int field
// named |field| on |holder|.  Unlike BOOL_ACCESSORS no setter is
// generated.  (The stray trailing line-continuation backslash after the
// closing brace has been removed; it silently extended the macro onto
// the following blank line.)
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }
110
111
// Getter/setter pair for a boolean bit at |offset| inside the int field
// named |field| on |holder|.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
119
120
// Returns true if this object was created from a function template that
// is, or inherits from, |expected|.  Used to validate API receiver
// objects.
bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}
138
139
// ----------------------------------------------------------------------------
// Type-testing predicates.  Unless noted otherwise these read the
// instance type out of the object's map; the string variants decode the
// representation/encoding bits via StringShape.

bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::IsHeapNumber() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
}


// All string types are allocated below FIRST_NONSTRING_TYPE, so a
// single comparison suffices.
bool Object::IsString() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


// "Spec objects" are the object types defined by the ECMAScript spec
// (everything at or above FIRST_SPEC_OBJECT_TYPE).
bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSymbol() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
  STATIC_ASSERT(kSymbolTag != 0);
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  return (type & kIsSymbolMask) != 0;
}


bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

// True if this object can serve as the elements backing store of a
// JSObject.
bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
}
235
// StringShape caches a string's instance type so repeated shape tests
// avoid re-loading the map.  A cached shape can go stale if the string
// is mutated (e.g. flattened); the debug-only set_valid()/valid()
// machinery tracks that.

StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Same bit test as Object::IsSymbol(), on the cached instance type.
bool StringShape::IsSymbol() {
  ASSERT(valid());
  STATIC_ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}
262
263
// Encoding test: the string stores one-byte (ASCII) characters.
bool String::IsAsciiRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag;
}


// Encoding test: the string stores two-byte (UC16) characters.
bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


// Like IsAsciiRepresentation(), but for indirect (cons/sliced) strings
// reports the encoding of the underlying string.  Requires a flat
// string.
bool String::IsAsciiRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsAsciiRepresentation();
  }
}


// Two-byte counterpart of IsAsciiRepresentationUnderneath().
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


// True if the string is ASCII-encoded, or is a two-byte string carrying
// the "ASCII data" hint (known to contain only ASCII characters).
bool String::HasOnlyAsciiChars() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag ||
         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
}
313
314
// Representation tests on the cached instance type bits.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect = cons or sliced, i.e. the string has an underlying string.
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding bits combined; used by the combined
// sequential/external + ascii/two-byte tests below.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
354
355
// Keep the internal bit layout in sync with the copy exported to the
// public API (Internals in v8.h).
STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);
382
383
384uc32 FlatStringReader::Get(int index) {
385  ASSERT(0 <= index && index <= length_);
386  if (is_ascii_) {
387    return static_cast<const byte*>(start_)[index];
388  } else {
389    return static_cast<const uc16*>(start_)[index];
390  }
391}
392
393
bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


bool Object::IsByteArray() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
}


// External arrays back typed element stores whose data lives outside
// the V8 heap (e.g. canvas pixel buffers).
bool Object::IsExternalPixelArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
          EXTERNAL_PIXEL_ARRAY_TYPE;
}


// All external array types form a contiguous instance-type range, so a
// pair of comparisons covers every element kind.
bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


bool Object::IsExternalByteArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_BYTE_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedByteArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
}


bool Object::IsExternalShortArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_SHORT_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedShortArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
}


bool Object::IsExternalIntArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_INT_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedIntArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
}


bool Object::IsExternalFloatArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_FLOAT_ARRAY_TYPE;
}


bool Object::IsExternalDoubleArray() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() ==
      EXTERNAL_DOUBLE_ARRAY_TYPE;
}
476
477
// ----------------------------------------------------------------------------
// MaybeObject: either a real Object or a tagged Failure signalling an
// exception or an allocation that must be retried after GC.

bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


// The pending-exception marker is a singleton Failure.
bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}
509
510
// JSReceiver = anything that can receive properties: JS objects plus
// (function) proxies.
bool Object::IsJSReceiver() {
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  return IsJSReceiver() && !IsJSProxy();
}


// Covers both plain proxies and function proxies.
bool Object::IsJSProxy() {
  return Object::IsHeapObject() &&
     (HeapObject::cast(this)->map()->instance_type() == JS_PROXY_TYPE ||
      HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE);
}


bool Object::IsJSFunctionProxy() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsJSWeakMap() {
  return Object::IsJSObject() &&
      HeapObject::cast(this)->map()->instance_type() == JS_WEAK_MAP_TYPE;
}


bool Object::IsJSContextExtensionObject() {
  return IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_CONTEXT_EXTENSION_OBJECT_TYPE);
}
546
547
bool Object::IsMap() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
}


bool Object::IsFixedArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
}


bool Object::IsFixedDoubleArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() ==
          FIXED_DOUBLE_ARRAY_TYPE;
}


// Descriptor arrays have no dedicated instance type; any fixed array
// passes.  Used for asserts only.
bool Object::IsDescriptorArray() {
  return IsFixedArray();
}
570
571
// Heuristic check used by asserts: deopt data is stored in an ordinary
// fixed array, so only the length can be sanity-checked.
bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
597
598
599bool Object::IsContext() {
600  if (Object::IsHeapObject()) {
601    Map* map = HeapObject::cast(this)->map();
602    Heap* heap = map->GetHeap();
603    return (map == heap->function_context_map() ||
604            map == heap->catch_context_map() ||
605            map == heap->with_context_map() ||
606            map == heap->global_context_map() ||
607            map == heap->block_context_map());
608  }
609  return false;
610}
611
612
// Map-identity check against the heap's singleton global-context map.
bool Object::IsGlobalContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->global_context_map();
}


// Map-identity check against the serialized-scope-info map.
bool Object::IsSerializedScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
}
625
626
bool Object::IsJSFunction() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
}


// Specialization for the generic Is<T>() helper.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


bool Object::IsCode() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
}


// Reading maps is unsafe during some GC phases; assert it is currently
// allowed before dereferencing the map.
bool Object::IsOddball() {
  ASSERT(HEAP->is_safe_to_read_maps());
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
}


bool Object::IsJSGlobalPropertyCell() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type()
      == JS_GLOBAL_PROPERTY_CELL_TYPE;
}
656
657
bool Object::IsSharedFunctionInfo() {
  return Object::IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       SHARED_FUNCTION_INFO_TYPE);
}


bool Object::IsJSValue() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
}


bool Object::IsJSMessageObject() {
  return Object::IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_MESSAGE_OBJECT_TYPE);
}


// A JSValue whose wrapped primitive is a string.
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


bool Object::IsForeign() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == FOREIGN_TYPE;
}


// True for the true/false oddballs only (the kind encoding reserves a
// bit pattern distinguishing booleans from other oddballs).
bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


bool Object::IsJSArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
}


bool Object::IsJSRegExp() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
}


// Specialization for the generic Is<T>() helper.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
710
711
// Hash tables share the fixed-array layout but use a dedicated map.
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// A dictionary is any hash table except the (singleton) symbol table.
bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->symbol_table();
}


bool Object::IsSymbolTable() {
  return IsHashTable() && this ==
         HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}
729
730
// Heuristic: a function result cache is a fixed array whose length is
// the fixed header plus a whole number of entries.
bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
#endif
  return true;
}


// Heuristic: the normalized map cache is a fixed array of exactly
// kEntries elements.
bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
#endif
  return true;
}


// The following cache types are plain hash tables and cannot be told
// apart from one another; these predicates exist for asserts.
bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}
777
778
bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}


// Global proxies always require access checks; the assert guards that
// invariant.
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


// Global objects come in two flavours: the JS global object and the
// builtins object.
bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


bool Object::IsJSGlobalObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_GLOBAL_OBJECT_TYPE);
}


bool Object::IsJSBuiltinsObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_BUILTINS_OBJECT_TYPE);
}


bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_access_check_needed();
}
826
827
// True for any of the internal "struct" types listed in STRUCT_LIST.
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Generates Object::Is##Name() for every struct type in STRUCT_LIST.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
846
847
// The singleton oddballs are distinguished by their kind() field.
bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
876
877
878double Object::Number() {
879  ASSERT(IsNumber());
880  return IsSmi()
881    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
882    : reinterpret_cast<HeapNumber*>(this)->value();
883}
884
885
886MaybeObject* Object::ToSmi() {
887  if (IsSmi()) return this;
888  if (IsHeapNumber()) {
889    double value = HeapNumber::cast(this)->value();
890    int int_value = FastD2I(value);
891    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
892      return Smi::FromInt(int_value);
893    }
894  }
895  return Failure::Exception();
896}
897
898
// True if this is a JSObject whose [[Class]] name equals |name|.
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(HEAP->IsAllocationAllowed());
  return GetElementWithReceiver(this, index);
}


// Variant for callers that know the element access cannot fail; the
// failure case is asserted away in debug builds.
Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


// Property lookup starting at this object; the two-argument overload
// discards the resulting attributes.
MaybeObject* Object::GetProperty(String* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}
931
932
// Raw address of the field at |offset| within heap object |p|.  The
// kHeapObjectTag bias compensates for the tagged pointer.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// TODO(isolates): Pass heap in to these macros.
// NOTE(review): this macro carries its own trailing semicolon and the
// one below expands to an unbraced if/else -- both are dangling-else
// hazards if ever expanded inside an unbraced if.  Left as-is because
// expansion sites outside this file are not visible here.
#define WRITE_BARRIER(object, offset) \
  object->GetHeap()->RecordWrite(object->address(), offset);

// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
  if (mode == UPDATE_WRITE_BARRIER) { \
    heap->RecordWrite(object->address(), offset); \
  } else { \
    ASSERT(mode == SKIP_WRITE_BARRIER); \
    ASSERT(heap->InNewSpace(object) || \
           !heap->InNewSpace(READ_FIELD(object, offset)) || \
           Page::FromAddress(object->address())->           \
               IsRegionDirty(object->address() + offset));  \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS


#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


// Primitive-typed field accessors; no write barriers since none of
// these store tagged pointers.
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1028
1029
// Address of the tagged slot at |byte_offset|; used by GC visitors to
// iterate an object's pointer fields.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


// Tag an int as a Smi by shifting it into the payload bits and or-ing
// in the Smi tag.
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
1054
1055
// A Failure packs its type (and, for RETRY_AFTER_GC, the allocation
// space) into the bits above the failure tag; value() strips the tag.

Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


// Only meaningful for RETRY_AFTER_GC failures (asserted).
AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


// Untagged payload of this failure pointer.
intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


// Build a tagged failure pointer from type + payload; the assert checks
// the payload survives the tag shift without losing bits.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}
1116
1117
// Returns true if 'value' fits in a Smi payload. The fast check is
// performed with unsigned arithmetic to avoid signed-overflow UB; the
// DEBUG-only 'in_range' recomputes the answer naively for cross-checking.
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as an tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
1143
1144
// A MapWord is the first word of a heap object. Normally it holds the
// map pointer; during GC it is overloaded to hold a forwarding address,
// mark/overflow bits, or (during compaction) an encoded map address plus
// live-offset — see EncodeAddress/DecodeOffset below.
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// A forwarding address is distinguishable from a map pointer because it
// is stored untagged and therefore looks like a Smi.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}


// Marking inverts the bit: a CLEARED kMarkingMask bit means "marked".
bool MapWord::IsMarked() {
  return (value_ & kMarkingMask) == 0;
}


void MapWord::SetMark() {
  value_ &= ~kMarkingMask;
}


void MapWord::ClearMark() {
  value_ |= kMarkingMask;
}


bool MapWord::IsOverflowed() {
  return (value_ & kOverflowMask) != 0;
}


void MapWord::SetOverflow() {
  value_ |= kOverflowMask;
}


void MapWord::ClearOverflow() {
  value_ &= ~kOverflowMask;
}


// Packs (map address, live-byte offset) into a single word for the
// compacting collector: forwarding offset in the high bits, then the
// map's page offset, then the map page index in the low bits.
MapWord MapWord::EncodeAddress(Address map_address, int offset) {
  // Offset is the distance in live bytes from the first live object in the
  // same page. The offset between two objects in the same page should not
  // exceed the object area size of a page.
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);

  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));

  Page* map_page = Page::FromAddress(map_address);
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);

  uintptr_t map_page_offset =
      map_page->Offset(map_address) >> kMapAlignmentBits;

  uintptr_t encoding =
      (compact_offset << kForwardingOffsetShift) |
      (map_page_offset << kMapPageOffsetShift) |
      (map_page->mc_page_index << kMapPageIndexShift);
  return MapWord(encoding);
}


// Inverse of EncodeAddress for the map-address part.
Address MapWord::DecodeMapAddress(MapSpace* map_space) {
  int map_page_index =
      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
  ASSERT_MAP_PAGE_INDEX(map_page_index);

  int map_page_offset = static_cast<int>(
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
      kMapAlignmentBits);

  return (map_space->PageAddress(map_page_index) + map_page_offset);
}


int MapWord::DecodeOffset() {
  // The offset field is represented in the kForwardingOffsetBits
  // most-significant bits.
  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
  return static_cast<int>(offset);
}


MapWord MapWord::FromEncodedAddress(Address address) {
  return MapWord(reinterpret_cast<uintptr_t>(address));
}


Address MapWord::ToEncodedAddress() {
  return reinterpret_cast<Address>(value_);
}
1255
1256
#ifdef DEBUG
// Debug-only check that the field at 'offset' holds a valid pointer.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

// Debug-only check that the field at 'offset' holds a Smi.
void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif
1266
1267
// Returns the heap this object lives in, recovered via its map.
Heap* HeapObject::GetHeap() {
  // During GC, the map pointer in HeapObject is used in various ways that
  // prevent us from retrieving Heap from the map.
  // Assert that we are not in GC, implement GC code in a way that it doesn't
  // pull heap from the map.
  ASSERT(HEAP->is_safe_to_read_maps());
  return map()->heap();
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// Reads the first word of the object raw, without interpreting it — it
// may be a map pointer or a GC encoding (see MapWord).
MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Converts an untagged address to a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


// Inverse of FromAddress: strips the heap-object tag.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


// Visits all pointer fields in [start, end) with the given visitor.
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1330
1331
// Mark/overflow bits used by the mark-compact collector; all are stored
// in the object's map word (see the MapWord helpers above).
bool HeapObject::IsMarked() {
  return map_word().IsMarked();
}


void HeapObject::SetMark() {
  ASSERT(!IsMarked());
  MapWord first_word = map_word();
  first_word.SetMark();
  set_map_word(first_word);
}


void HeapObject::ClearMark() {
  ASSERT(IsMarked());
  MapWord first_word = map_word();
  first_word.ClearMark();
  set_map_word(first_word);
}


bool HeapObject::IsOverflowed() {
  return map_word().IsOverflowed();
}


void HeapObject::SetOverflow() {
  MapWord first_word = map_word();
  first_word.SetOverflow();
  set_map_word(first_word);
}


void HeapObject::ClearOverflow() {
  ASSERT(IsOverflowed());
  MapWord first_word = map_word();
  first_word.ClearOverflow();
  set_map_word(first_word);
}
1371
1372
// Boxed IEEE-754 double stored at kValueOffset.
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Unbiased binary exponent, extracted from the word at kExponentOffset.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Nonzero iff the number is negative (raw sign bit, not shifted to 0/1).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1392
1393
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// The elements backing store; its concrete type must agree with the
// map's elements kind (checked in set_elements).
FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  ASSERT(array->HasValidElements());
  return static_cast<FixedArrayBase*>(array);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  ASSERT(map()->has_fast_elements() ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT(map()->has_fast_double_elements() ==
         value->IsFixedDoubleArray());
  ASSERT(value->HasValidElements());
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
}


// Points properties at the canonical empty array; no write barrier is
// needed because the empty array is never in new space (asserted).
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}


// Switches the object back to an empty fast-elements backing store;
// may fail (allocation of the fast-elements map) and propagate a Failure.
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1438
1439
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// Oddball kind (undefined/null/true/false/...), stored as a raw byte.
byte Oddball::kind() {
  return READ_BYTE_FIELD(this, kKindOffset);
}


void Oddball::set_kind(byte value) {
  WRITE_BYTE_FIELD(this, kKindOffset, value);
}


Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1464
1465
// Fixed header size in bytes for this object's instance type; internal
// fields (and in-object properties at the tail) live after this prefix.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    // NOTE(review): the next two cases return JSValue::kSize rather than
    // JSArray::kSize / JSRegExp::kSize. Presumably intentional (sizes
    // coincide?) but worth confirming against the class definitions.
    case JS_ARRAY_TYPE:
      return JSValue::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
1498
1499
// Number of embedder internal fields: total size minus header, in words,
// excluding in-object properties stored at the end of the object.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1533
1534
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    // Negative adjusted index: in-object property, addressed backwards
    // from the end of the instance.
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(this, offset);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


// Byte offset of the in-object property at 'index'; only valid for
// indices that actually map to in-object storage (hence the ASSERT).
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
  return value;
}



// Fills every field of a freshly allocated object with 'value'. No write
// barrier is performed, so 'value' must not live in new space (asserted).
void JSObject::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1603
1604
// Fast-mode objects keep properties in a flat array; slow-mode objects
// use a dictionary.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}


// Fills all fields of a Struct with undefined (safe without a write
// barrier because undefined is never in new space).
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1626
1627
// Converts this object to a uint32 array index if it represents one
// exactly: a non-negative Smi, or a HeapNumber whose value round-trips
// through uint32 without loss. Returns false otherwise.
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    // Exactness check: rejects fractions, negatives, and out-of-range.
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
1645
1646
1647bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1648  if (!this->IsJSValue()) return false;
1649
1650  JSValue* js_value = JSValue::cast(this);
1651  if (!js_value->value()->IsString()) return false;
1652
1653  String* str = String::cast(js_value->value());
1654  if (index >= (uint32_t)str->length()) return false;
1655
1656  return true;
1657}
1658
1659
// Checked downcast: valid for both FixedArray and FixedDoubleArray.
FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


// Smi overload: no write barrier needed because Smis are not heap
// pointers. Copy-on-write arrays must never be mutated (asserted).
void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1688
1689
// "The hole" in a double array is represented by one specific NaN bit
// pattern (kHoleNanInt64); compare bit patterns, not values, since any
// NaN comparison with == is false.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


// A canonical NaN for stored values, guaranteed distinct from the hole
// NaN so user NaNs can never be mistaken for holes.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}
1705
1706
// Raw double read; caller must have checked is_the_hole first.
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}


// Boxing read: returns the_hole sentinel for holes, otherwise may
// allocate a HeapNumber (hence MaybeObject).
MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  // Canonicalize NaNs so a user-supplied NaN can't collide with the
  // hole bit pattern.
  if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}
1747
1748
1749void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
1750  int old_length = from->length();
1751  ASSERT(old_length < length());
1752  if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
1753    OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
1754                FIELD_ADDR(from, kHeaderSize),
1755                old_length * kDoubleSize);
1756  } else {
1757    for (int i = 0; i < old_length; ++i) {
1758      if (from->is_the_hole(i)) {
1759        set_the_hole(i);
1760      } else {
1761        set(i, from->get_scalar(i));
1762      }
1763    }
1764  }
1765  int offset = kHeaderSize + old_length * kDoubleSize;
1766  for (int current = from->length(); current < length(); ++current) {
1767    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1768    offset += kDoubleSize;
1769  }
1770}
1771
1772
// Converts an object-valued FixedArray into this double array: the hole
// maps to the hole NaN, numbers are unboxed via Number(); the tail
// beyond 'from' is filled with holes.
void FixedDoubleArray::Initialize(FixedArray* from) {
  int old_length = from->length();
  ASSERT(old_length < length());
  for (int i = 0; i < old_length; i++) {
    Object* hole_or_object = from->get(i);
    if (hole_or_object->IsTheHole()) {
      set_the_hole(i);
    } else {
      set(i, hole_or_object->Number());
    }
  }
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}
1790
1791
// Converts a number dictionary into this double array: first fill the
// whole array with holes, then copy each numeric-keyed entry into its
// slot. Assumes every numeric key fits within length() — TODO confirm
// callers guarantee this (no bounds check here beyond set()'s asserts).
void FixedDoubleArray::Initialize(SeededNumberDictionary* from) {
  int offset = kHeaderSize;
  for (int current = 0; current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
  for (int i = 0; i < from->Capacity(); i++) {
    Object* key = from->KeyAt(i);
    if (key->IsNumber()) {
      uint32_t entry = static_cast<uint32_t>(key->Number());
      set(entry, from->ValueAt(i)->Number());
    }
  }
}
1806
1807
// New-space objects can skip the write barrier; the AssertNoAllocation
// parameter documents that no GC (and thus no promotion) can occur
// between this check and the subsequent writes.
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}


void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
}


// Static setter that skips the write barrier entirely — hence the ASSERT
// that 'value' is not in new space.
void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
1830}
1831
1832
// Sentinel setters. All of these skip the write barrier, which is safe
// because the sentinel values (undefined/null/the-hole) live in old
// space — asserted in each case.
void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}


void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}


// NOTE(review): mixes HEAP (asserts) with GetHeap() (the write) — the
// siblings above consistently thread a Heap*; consider unifying.
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


// Unchecked setters: no bounds/COW checks beyond the Smi assert; no
// write barrier (Smis need none).
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
}


void FixedArray::set_null_unchecked(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}


// Address of element 0, for bulk pointer iteration/copying.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
1895}
1896
1897
// An empty descriptor array is either a Smi placeholder or the canonical
// empty array (length <= kFirstIndex means no real descriptors).
bool DescriptorArray::IsEmpty() {
  ASSERT(this->IsSmi() ||
         this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return this->IsSmi() || length() <= kFirstIndex;
}


// Backing storage for the owning map's bit_field3 while the map shares
// this descriptor array.
int DescriptorArray::bit_field3_storage() {
  Object* storage = READ_FIELD(this, kBitField3StorageOffset);
  return Smi::cast(storage)->value();
}

void DescriptorArray::set_bit_field3_storage(int value) {
  ASSERT(!IsEmpty());
  WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}


// Swaps two elements without write barriers (see fast_set's constraints).
void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* tmp = array->get(first);
  fast_set(array, first, array->get(second));
  fast_set(array, second, tmp);
}
1922
1923
// Finds the descriptor index for 'name', or kNotFound. Uses linear
// search for small symbol-keyed arrays, binary search otherwise.
int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}


// Search memoized through the per-isolate descriptor lookup cache.
int DescriptorArray::SearchWithCache(String* name) {
  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name);
    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
  }
  return number;
}
1949}
1950
1951
1952String* DescriptorArray::GetKey(int descriptor_number) {
1953  ASSERT(descriptor_number < number_of_descriptors());
1954  return String::cast(get(ToKeyIndex(descriptor_number)));
1955}
1956
1957
1958Object* DescriptorArray::GetValue(int descriptor_number) {
1959  ASSERT(descriptor_number < number_of_descriptors());
1960  return GetContentArray()->get(ToValueIndex(descriptor_number));
1961}
1962
1963
1964Smi* DescriptorArray::GetDetails(int descriptor_number) {
1965  ASSERT(descriptor_number < number_of_descriptors());
1966  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
1967}
1968
1969
1970PropertyType DescriptorArray::GetType(int descriptor_number) {
1971  ASSERT(descriptor_number < number_of_descriptors());
1972  return PropertyDetails(GetDetails(descriptor_number)).type();
1973}
1974
1975
1976int DescriptorArray::GetFieldIndex(int descriptor_number) {
1977  return Descriptor::IndexFromValue(GetValue(descriptor_number));
1978}
1979
1980
1981JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1982  return JSFunction::cast(GetValue(descriptor_number));
1983}
1984
1985
1986Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1987  ASSERT(GetType(descriptor_number) == CALLBACKS);
1988  return GetValue(descriptor_number);
1989}
1990
1991
1992AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1993  ASSERT(GetType(descriptor_number) == CALLBACKS);
1994  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1995  return reinterpret_cast<AccessorDescriptor*>(p->address());
1996}
1997
1998
1999bool DescriptorArray::IsProperty(int descriptor_number) {
2000  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
2001}
2002
2003
2004bool DescriptorArray::IsTransition(int descriptor_number) {
2005  PropertyType t = GetType(descriptor_number);
2006  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
2007      t == ELEMENTS_TRANSITION;
2008}
2009
2010
2011bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
2012  return GetType(descriptor_number) == NULL_DESCRIPTOR;
2013}
2014
2015
2016bool DescriptorArray::IsDontEnum(int descriptor_number) {
2017  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
2018}
2019
2020
2021void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2022  desc->Init(GetKey(descriptor_number),
2023             GetValue(descriptor_number),
2024             PropertyDetails(GetDetails(descriptor_number)));
2025}
2026
2027
2028void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2029  // Range check.
2030  ASSERT(descriptor_number < number_of_descriptors());
2031
2032  // Make sure none of the elements in desc are in new space.
2033  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
2034  ASSERT(!HEAP->InNewSpace(desc->GetValue()));
2035
2036  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
2037  FixedArray* content_array = GetContentArray();
2038  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
2039  fast_set(content_array, ToDetailsIndex(descriptor_number),
2040           desc->GetDetails().AsSmi());
2041}
2042
2043
2044void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
2045  Descriptor desc;
2046  src->Get(src_index, &desc);
2047  Set(index, &desc);
2048}
2049
2050
2051void DescriptorArray::Swap(int first, int second) {
2052  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
2053  FixedArray* content_array = GetContentArray();
2054  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
2055  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
2056}
2057
2058
2059template<typename Shape, typename Key>
2060int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2061  const int kMinCapacity = 32;
2062  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2063  if (capacity < kMinCapacity) {
2064    capacity = kMinCapacity;  // Guarantee min capacity.
2065  }
2066  return capacity;
2067}
2068
2069
// Convenience overload that uses the table's own isolate.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
// Open-addressed probe loop: undefined marks an empty slot (end of probe
// chain), null marks a deleted slot (probing continues past it).
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
2093
2094
// True if the slow-elements bit is set in the max-number-key slot,
// i.e. element accesses must go through the dictionary.
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

// Returns the largest number key stored, shifted out of the tagged slot.
// Only meaningful when requires_slow_elements() is false.
uint32_t SeededNumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

// Marks the dictionary as requiring slow elements; note this overwrites
// (discards) any previously recorded max number key.
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
2113
2114
2115// ------------------------------------
2116// Cast operations
2117
2118
2119CAST_ACCESSOR(FixedArray)
2120CAST_ACCESSOR(FixedDoubleArray)
2121CAST_ACCESSOR(DescriptorArray)
2122CAST_ACCESSOR(DeoptimizationInputData)
2123CAST_ACCESSOR(DeoptimizationOutputData)
2124CAST_ACCESSOR(SymbolTable)
2125CAST_ACCESSOR(JSFunctionResultCache)
2126CAST_ACCESSOR(NormalizedMapCache)
2127CAST_ACCESSOR(CompilationCacheTable)
2128CAST_ACCESSOR(CodeCacheHashTable)
2129CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2130CAST_ACCESSOR(MapCache)
2131CAST_ACCESSOR(String)
2132CAST_ACCESSOR(SeqString)
2133CAST_ACCESSOR(SeqAsciiString)
2134CAST_ACCESSOR(SeqTwoByteString)
2135CAST_ACCESSOR(SlicedString)
2136CAST_ACCESSOR(ConsString)
2137CAST_ACCESSOR(ExternalString)
2138CAST_ACCESSOR(ExternalAsciiString)
2139CAST_ACCESSOR(ExternalTwoByteString)
2140CAST_ACCESSOR(JSReceiver)
2141CAST_ACCESSOR(JSObject)
2142CAST_ACCESSOR(Smi)
2143CAST_ACCESSOR(HeapObject)
2144CAST_ACCESSOR(HeapNumber)
2145CAST_ACCESSOR(Oddball)
2146CAST_ACCESSOR(JSGlobalPropertyCell)
2147CAST_ACCESSOR(SharedFunctionInfo)
2148CAST_ACCESSOR(Map)
2149CAST_ACCESSOR(JSFunction)
2150CAST_ACCESSOR(GlobalObject)
2151CAST_ACCESSOR(JSGlobalProxy)
2152CAST_ACCESSOR(JSGlobalObject)
2153CAST_ACCESSOR(JSBuiltinsObject)
2154CAST_ACCESSOR(Code)
2155CAST_ACCESSOR(JSArray)
2156CAST_ACCESSOR(JSRegExp)
2157CAST_ACCESSOR(JSProxy)
2158CAST_ACCESSOR(JSFunctionProxy)
2159CAST_ACCESSOR(JSWeakMap)
2160CAST_ACCESSOR(Foreign)
2161CAST_ACCESSOR(ByteArray)
2162CAST_ACCESSOR(ExternalArray)
2163CAST_ACCESSOR(ExternalByteArray)
2164CAST_ACCESSOR(ExternalUnsignedByteArray)
2165CAST_ACCESSOR(ExternalShortArray)
2166CAST_ACCESSOR(ExternalUnsignedShortArray)
2167CAST_ACCESSOR(ExternalIntArray)
2168CAST_ACCESSOR(ExternalUnsignedIntArray)
2169CAST_ACCESSOR(ExternalFloatArray)
2170CAST_ACCESSOR(ExternalDoubleArray)
2171CAST_ACCESSOR(ExternalPixelArray)
2172CAST_ACCESSOR(Struct)
2173
2174
2175#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2176  STRUCT_LIST(MAKE_STRUCT_CAST)
2177#undef MAKE_STRUCT_CAST
2178
2179
// Checked downcast for hash tables; templated, so it cannot use
// CAST_ACCESSOR like the types above.
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


// Smi-encoded length fields for fixed arrays and strings.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
2190
2191
// Raw hash field accessor (contains the hash plus flag bits).
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // On 64-bit hosts the hash field shares a pointer-sized slot; clear the
  // unused upper half so the whole slot is deterministic.
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}


bool String::Equals(String* other) {
  if (other == this) return true;
  // Symbols are unique, so two distinct symbols can never be equal.
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}


// Flattens a cons string; returns the string itself if already flat.
// May fail (allocation), hence the MaybeObject return.
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);
}


// Like TryFlatten, but returns the original string if flattening fails
// (i.e. never returns a failure object).
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (!flat->ToObject(&successfully_flattened)) return this;
  return String::cast(successfully_flattened);
}
2228
2229
// Returns the character at |index|, dispatching on the string's full
// representation tag (sequential/cons/external/sliced x ascii/two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kAsciiStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
2254
2255
// Writes a character; only valid on sequential strings.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsAsciiRepresentation()
      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}


// A string is flat unless it is a cons string with a non-empty second part.
bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}


String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  ASSERT(this->IsFlat());
  ASSERT(StringShape(this).IsIndirect());
  // ConsString::first and SlicedString::parent live at the same offset,
  // so one read works for both indirect representations.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
2281
2282
// Reads one ASCII character (stored as a single byte).
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Writes one character; the value must fit in the ASCII range.
void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (right after the object header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}
2304
2305
// Address of the first character (right after the object header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


// Reads one two-byte character.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size in bytes; the instance type argument is unused here.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


// Object size in bytes; the instance type argument is unused here.
int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
2336
2337
// The string this slice is a view into.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


// No write barrier here; parents are expected to be set at construction
// time (the assert restricts parents to sequential strings).
void SlicedString::set_parent(String* parent) {
  ASSERT(parent->IsSeqString());
  WRITE_FIELD(this, kParentOffset, parent);
}


// Smi-encoded start offset of the slice within its parent.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2350
2351
// Left half of the cons pair.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Like first(), but without the String cast (usable during GC).
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
}


// Right half of the cons pair.
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


// Like second(), but without the String cast (usable during GC).
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
}
2382
2383
// The embedder-provided resource backing this external string. The field
// holds a raw C++ pointer, not a tagged object, hence the direct
// reinterpret_cast instead of READ_FIELD.
ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalAsciiString::set_resource(
    ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}


// Two-byte variant of the accessors above.
ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::set_resource(
    ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
2404
2405
// Resets size and finger to the start of the entries area, making the
// cache logically empty without touching the entries themselves.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


// Clears all cached entries (overwriting them with the hole) and resets
// the size/finger bookkeeping.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


// Current logical size, stored as a Smi in a fixed slot.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


// Index of the "finger" used for the cache's replacement policy.
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
2440
2441
// Reads one byte of payload.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Reads the payload as an array of ints; |index| is in int-sized units.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Recovers the tagged ByteArray pointer from the address of its payload.
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


// Untagged address of the first payload byte (inverse of the above).
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
2469
2470
// Typed view of the external backing store.
uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalPixelArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}


// Boxed variant: returns the element as a Smi.
MaybeObject* ExternalPixelArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}


// The backing store lives outside the V8 heap; its address is stored as a
// raw intptr_t field.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// Note: |mode| is unused — no write barrier is needed for an off-heap
// pointer.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
2505
2506
// int8 element access; get() boxes the value as a Smi.
int8_t ExternalByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


// uint8 element access; get() boxes the value as a Smi.
uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}
2543
2544
// int16 element access; get() boxes the value as a Smi.
int16_t ExternalShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


// uint16 element access; get() boxes the value as a Smi.
uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
2581
2582
// int32 element access; the boxed get() may allocate a HeapNumber for
// values outside the Smi range, hence the MaybeObject return.
int32_t ExternalIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalIntArray::get(int index) {
    return GetHeap()->NumberFromInt32(get_scalar(index));
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


// uint32 element access; the boxed get() may allocate a HeapNumber.
uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedIntArray::get(int index) {
    return GetHeap()->NumberFromUint32(get_scalar(index));
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
2619
2620
// float32 element access; the boxed get() may allocate a HeapNumber.
float ExternalFloatArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalFloatArray::get(int index) {
    return GetHeap()->NumberFromDouble(get_scalar(index));
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}


// float64 element access; the boxed get() may allocate a HeapNumber.
double ExternalDoubleArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalDoubleArray::get(int index) {
    return GetHeap()->NumberFromDouble(get_scalar(index));
}


void ExternalDoubleArray::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
2657
2658
// Id of the static visitor used by the GC for objects with this map.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  // Stored in a single byte, so the id must fit in 0..255.
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes; stored divided by kPointerSize to fit in a byte.
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


// Number of in-object property slots.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
2683
2684
2685int HeapObject::SizeFromMap(Map* map) {
2686  int instance_size = map->instance_size();
2687  if (instance_size != kVariableSizeSentinel) return instance_size;
2688  // We can ignore the "symbol" bit becase it is only set for symbols
2689  // and implies a string type.
2690  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2691  // Only inline the most frequent cases.
2692  if (instance_type == FIXED_ARRAY_TYPE) {
2693    return FixedArray::BodyDescriptor::SizeOf(map, this);
2694  }
2695  if (instance_type == ASCII_STRING_TYPE) {
2696    return SeqAsciiString::SizeFor(
2697        reinterpret_cast<SeqAsciiString*>(this)->length());
2698  }
2699  if (instance_type == BYTE_ARRAY_TYPE) {
2700    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2701  }
2702  if (instance_type == STRING_TYPE) {
2703    return SeqTwoByteString::SizeFor(
2704        reinterpret_cast<SeqTwoByteString*>(this)->length());
2705  }
2706  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2707    return FixedDoubleArray::SizeFor(
2708        reinterpret_cast<FixedDoubleArray*>(this)->length());
2709  }
2710  ASSERT(instance_type == CODE_TYPE);
2711  return reinterpret_cast<Code*>(this)->CodeSize();
2712}
2713
2714
// Stores the instance size; must be pointer-aligned and, after dividing
// by kPointerSize, fit in a single byte.
void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


// Clamped to 255 because the count is stored in a single byte.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
2755
2756
// Raw access to the map's first flag byte.
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


// Raw access to the map's second flag byte.
byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
2775
2776
2777void Map::set_non_instance_prototype(bool value) {
2778  if (value) {
2779    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2780  } else {
2781    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2782  }
2783}
2784
2785
// Flag getters/setters below all follow the same pattern: a single bit in
// bit_field or bit_field2.

bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
  }
}


bool Map::function_with_prototype() {
  return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}
2830
2831
void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}


// The "shared" flag lives in bit_field3, unlike the flags above.
void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field3(bit_field3() | (1 << kIsShared));
  } else {
    set_bit_field3(bit_field3() & ~(1 << kIsShared));
  }
}

bool Map::is_shared() {
  return ((1 << kIsShared) & bit_field3()) != 0;
}


// Reads the constructor field without type checking (usable during GC).
JSFunction* Map::unchecked_constructor() {
  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}


// Reads the prototype-transitions field without type checking.
FixedArray* Map::unchecked_prototype_transitions() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kPrototypeTransitionsOffset));
}
2867
2868
// The flags word packs kind, IC state, type, extra IC state, argument
// count and cache-holder information; see the Extract* helpers below.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}


// Only meaningful for inline cache stubs.
Code::ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub());
  return ExtractExtraICStateFromFlags(flags());
}


PropertyType Code::type() {
  return ExtractTypeFromFlags(flags());
}


// Only meaningful for call stubs and plain stubs.
int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}
2917
2918
// Major key identifying the stub generator; only valid for the stub-like
// kinds listed in the asserts.
int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == UNARY_OP_IC ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == TO_BOOLEAN_IC);
  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == UNARY_OP_IC ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == TO_BOOLEAN_IC);
  // Stored in a single byte.
  ASSERT(0 <= major && major < 256);
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}


// Whether this full-codegen function may be optimized; FUNCTION kind only.
bool Code::optimizable() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
2950
2951
// The following accessors read/update single flags packed into the
// kFullCodeFlags byte; all are FUNCTION-kind only.

bool Code::has_deoptimization_support() {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


// Loop-nesting level at which on-stack replacement is allowed.
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT(kind() == FUNCTION);
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}
2993
2994
// Number of stack slots reserved by this optimized code object.
unsigned Code::stack_slots() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kStackSlotsOffset);
}


void Code::set_stack_slots(unsigned slots) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
}


// Offset of the safepoint table within this optimized code object.
unsigned Code::safepoint_table_offset() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
}


void Code::set_safepoint_table_offset(unsigned offset) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}


// Offset of the stack-check table within this full-codegen code object.
unsigned Code::stack_check_table_offset() {
  ASSERT(kind() == FUNCTION);
  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}


void Code::set_stack_check_table_offset(unsigned offset) {
  ASSERT(kind() == FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}


// Receiver check performed by this (keyed) call stub.
CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}
3044
3045
// Per-stub state bytes for the specialized IC kinds; each accessor asserts
// the matching stub kind.

byte Code::unary_op_type() {
  ASSERT(is_unary_op_stub());
  return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
}


void Code::set_unary_op_type(byte value) {
  ASSERT(is_unary_op_stub());
  WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
}


byte Code::binary_op_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_binary_op_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


byte Code::binary_op_result_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
}


void Code::set_binary_op_result_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
}


byte Code::compare_state() {
  ASSERT(is_compare_ic_stub());
  return READ_BYTE_FIELD(this, kCompareStateOffset);
}


void Code::set_compare_state(byte value) {
  ASSERT(is_compare_ic_stub());
  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}


byte Code::to_boolean_state() {
  ASSERT(is_to_boolean_ic_stub());
  return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
}


void Code::set_to_boolean_state(byte value) {
  ASSERT(is_to_boolean_ic_stub());
  WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
}

// True for all IC kinds (the kinds in [FIRST_IC_KIND, LAST_IC_KIND]).
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
}
3109
3110
// Packs the given components into a flags word using the BitField
// encoders; the inverse of the Extract* helpers below.
Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for call IC stubs or for store IC
  // stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         kind == CALL_IC ||
         kind == STORE_IC ||
         kind == KEYED_STORE_IC);
  // Compute the bit mask.
  int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | (argc << kArgumentsCountShift)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}


// Convenience wrapper that fixes the IC state to MONOMORPHIC.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
3141
3142
// Decoders for the individual bit fields packed by ComputeFlags above.

Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


PropertyType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


// argc is decoded manually since it has no BitField helper (see
// ComputeFlags).
int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
}


InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


// Clears the type bits so that flags differing only in property type
// compare equal.
Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}
3177
3178
// Maps the address of the first instruction back to the enclosing Code
// object (instructions start kHeaderSize past the object start).  The
// result is deliberately not type-checked; see the comment below.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
3188
3189
Isolate* Map::isolate() {
  return heap()->isolate();
}


// The owning heap is recovered from the header of the page that contains
// this object.
Heap* Map::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Heap* Code::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* Code::isolate() {
  return heap()->isolate();
}


Heap* JSGlobalPropertyCell::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* JSGlobalPropertyCell::isolate() {
  return heap()->isolate();
}
3230
3231
// Reads a code entry address stored at the given location and maps it back
// to the owning heap object (entry points are kHeaderSize past the start of
// the Code object).
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


// Prototypes are restricted to null or JSReceiver values.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
}
3248
3249
// The three helpers below return a map identical to this one except for its
// elements kind.  If the map already has the requested kind it is returned
// unchanged; otherwise a copy without transitions is made and its elements
// kind is rewritten.  Allocation may fail, hence the MaybeObject result.

MaybeObject* Map::GetFastElementsMap() {
  if (has_fast_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_elements_kind(FAST_ELEMENTS);
  isolate()->counters()->map_to_fast_elements()->Increment();
  return new_map;
}


MaybeObject* Map::GetFastDoubleElementsMap() {
  if (has_fast_double_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
  isolate()->counters()->map_to_fast_double_elements()->Increment();
  return new_map;
}


// "Slow" here means dictionary-backed elements.
MaybeObject* Map::GetSlowElementsMap() {
  if (!has_fast_elements() && !has_fast_double_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_elements_kind(DICTIONARY_ELEMENTS);
  isolate()->counters()->map_to_slow_elements()->Increment();
  return new_map;
}
3287
3288
// The kInstanceDescriptorsOrBitField3Offset slot is overloaded.  When the
// map has no own descriptor array the slot stores bit_field3 directly as a
// Smi; otherwise it stores a pointer to the DescriptorArray, and bit_field3
// is kept in the array's bit_field3_storage slot.  All accessors below
// dispatch on whether the slot currently holds a Smi.

DescriptorArray* Map::instance_descriptors() {
  Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    // No descriptor array installed; present the canonical empty array.
    return HEAP->empty_descriptor_array();
  } else {
    return DescriptorArray::cast(object);
  }
}


// Initializes the overloaded slot to a zero bit_field3 (no descriptors).
void Map::init_instance_descriptors() {
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
}


// Drops the descriptor array, preserving bit_field3 by moving its value
// back into the slot as a Smi.
void Map::clear_instance_descriptors() {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (!object->IsSmi()) {
    WRITE_FIELD(
        this,
        kInstanceDescriptorsOrBitField3Offset,
        Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
  }
}


// Installs a new descriptor array, first migrating the current bit_field3
// value into the array's storage slot.  Storing the empty descriptor array
// is implemented as clearing the slot back to the Smi encoding.
void Map::set_instance_descriptors(DescriptorArray* value,
                                   WriteBarrierMode mode) {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (value == isolate()->heap()->empty_descriptor_array()) {
    clear_instance_descriptors();
    return;
  } else {
    if (object->IsSmi()) {
      value->set_bit_field3_storage(Smi::cast(object)->value());
    } else {
      value->set_bit_field3_storage(
          DescriptorArray::cast(object)->bit_field3_storage());
    }
  }
  ASSERT(!is_shared());
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kInstanceDescriptorsOrBitField3Offset,
                            mode);
}


int Map::bit_field3() {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    return Smi::cast(object)->value();
  } else {
    return DescriptorArray::cast(object)->bit_field3_storage();
  }
}


void Map::set_bit_field3(int value) {
  ASSERT(Smi::IsValid(value));
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    WRITE_FIELD(this,
                kInstanceDescriptorsOrBitField3Offset,
                Smi::FromInt(value));
  } else {
    DescriptorArray::cast(object)->set_bit_field3_storage(value);
  }
}
3363
3364
// Macro-generated getter/setter pairs.  ACCESSORS reads/writes a tagged
// field; ACCESSORS_GCSAFE variants avoid checks that would fail during GC;
// BOOL_ACCESSORS/BOOL_GETTER expose a single bit of an int field.

ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
                 kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
          kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS(Script, type, Smi, kTypeOffset)
ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS(Script, eval_from_instructions_offset, Smi,
          kEvalFrominstructionsOffsetOffset)

#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
          kThisPropertyAssignmentsOffset)

BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
3499
3500
3501#if V8_HOST_ARCH_32_BIT
3502SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3503SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3504              kFormalParameterCountOffset)
3505SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3506              kExpectedNofPropertiesOffset)
3507SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3508SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3509              kStartPositionAndTypeOffset)
3510SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3511SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3512              kFunctionTokenPositionOffset)
3513SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3514              kCompilerHintsOffset)
3515SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3516              kThisPropertyAssignmentsCountOffset)
3517SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3518#else
3519
3520#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
3521  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
3522  int holder::name() {                                            \
3523    int value = READ_INT_FIELD(this, offset);                     \
3524    ASSERT(kHeapObjectTag == 1);                                  \
3525    ASSERT((value & kHeapObjectTag) == 0);                        \
3526    return value >> 1;                                            \
3527  }                                                               \
3528  void holder::set_##name(int value) {                            \
3529    ASSERT(kHeapObjectTag == 1);                                  \
3530    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
3531           (value & 0xC0000000) == 0x000000000);                  \
3532    WRITE_INT_FIELD(this,                                         \
3533                    offset,                                       \
3534                    (value << 1) & ~kHeapObjectTag);              \
3535  }
3536
3537#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
3538  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
3539  INT_ACCESSORS(holder, name, offset)
3540
3541
3542PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3543PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3544                        formal_parameter_count,
3545                        kFormalParameterCountOffset)
3546
3547PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3548                        expected_nof_properties,
3549                        kExpectedNofPropertiesOffset)
3550PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3551
3552PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3553PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3554                        start_position_and_type,
3555                        kStartPositionAndTypeOffset)
3556
3557PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3558                        function_token_position,
3559                        kFunctionTokenPositionOffset)
3560PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3561                        compiler_hints,
3562                        kCompilerHintsOffset)
3563
3564PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3565                        this_property_assignments_count,
3566                        kThisPropertyAssignmentsCountOffset)
3567PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3568#endif
3569
3570
// Construction counter used for in-object slack tracking.  Stored in a
// single byte, hence the range check in the setter.
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}


BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)


// Slack tracking is in progress as long as the initial map has not been
// cleared (reset to undefined).
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != HEAP->undefined_value();
}


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


// Setter is hand-written (rather than BOOL_ACCESSORS) because it also has
// to update the code object's optimizable flag.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
3609
3610
// More single-bit compiler hints and code-cache accessors.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, strict_mode,
               kStrictModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3624
3625bool Script::HasValidSource() {
3626  Object* src = this->source();
3627  if (!src->IsString()) return true;
3628  String* src_str = String::cast(src);
3629  if (!StringShape(src_str).IsExternal()) return true;
3630  if (src_str->IsAsciiRepresentation()) {
3631    return ExternalAsciiString::cast(src)->resource() != NULL;
3632  } else if (src_str->IsTwoByteRepresentation()) {
3633    return ExternalTwoByteString::cast(src)->resource() != NULL;
3634  }
3635  return true;
3636}
3637
3638
// Marks a builtin as not adapting its arguments by storing the sentinel
// parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// start_position shares an int field with type bits; the position occupies
// the bits above kStartPositionShift.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
3654
3655
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Like code(), but without the checked cast.
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
}


// NOTE(review): the mode parameter is unused and no write barrier is
// issued; the ASSERT relies on code objects never living in new space —
// confirm this invariant holds for all callers.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
}


// Scope info is stored as a raw pointer and reinterpreted on access.
SerializedScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<SerializedScopeInfo*>(
      READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
}


Smi* SharedFunctionInfo::deopt_counter() {
  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
}


// No write barrier: the stored value is a Smi.
void SharedFunctionInfo::set_deopt_counter(Smi* value) {
  WRITE_FIELD(this, kDeoptCounterOffset, value);
}


// A function counts as compiled once its code is no longer the
// lazy-compile builtin.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}
3699
3700
// The function_data field is overloaded: a FunctionTemplateInfo marks an
// API function, while a Smi holds a builtin function id.

bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


// Code age lives in the compiler_hints bits selected by kCodeAgeMask at
// kCodeAgeShift.
int SharedFunctionInfo::code_age() {
  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
}
3726
3727
3728void SharedFunctionInfo::set_code_age(int code_age) {
3729  set_compiler_hints(compiler_hints() |
3730                     ((code_age & kCodeAgeMask) << kCodeAgeShift));
3731}
3732
3733
// Only FUNCTION-kind (full-codegen) code can carry deoptimization support.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


// A function is a builtin if its global object is the builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global()->IsJSBuiltinsObject();
}


// Argument adaption is skipped when the parameter count is the sentinel.
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


// Marked for lazy recompilation when the lazy-recompile builtin has been
// installed as the function's code.
bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
3764
3765
Code* JSFunction::code() {
  return Code::cast(unchecked_code());
}


// JSFunction stores its code as a raw entry address rather than a tagged
// pointer; map the entry back to the Code object.
Code* JSFunction::unchecked_code() {
  return reinterpret_cast<Code*>(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  // Skip the write barrier because code is never in new space.
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
3783
3784
3785void JSFunction::ReplaceCode(Code* code) {
3786  bool was_optimized = IsOptimized();
3787  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3788
3789  set_code(code);
3790
3791  // Add/remove the function from the list of optimized functions for this
3792  // context based on the state change.
3793  if (!was_optimized && is_optimized) {
3794    context()->global_context()->AddOptimizedFunction(this);
3795  }
3796  if (was_optimized && !is_optimized) {
3797    context()->global_context()->RemoveOptimizedFunction(this);
3798  }
3799}
3800
3801
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Raw reads without checked casts.
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(this, kContextOffset);
}
3823
// The kPrototypeOrInitialMapOffset slot is overloaded: it holds the initial
// map once one exists, and otherwise the prototype itself (or the hole when
// neither has been set yet) — see instance_prototype() below.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// Compiled as long as the code is not the lazy-compile builtin.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


int JSFunction::NumberOfLiterals() {
  return literals()->length();
}
3883
3884
// The builtins object stores each JavaScript builtin function and its code
// in per-id fields; the id selects the field offset.

Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// No write barrier here: the ASSERT relies on code objects never being in
// new space.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}


ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
3916
3917
3918void JSProxy::InitializeBody(int object_size, Object* value) {
3919  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
3920  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
3921    WRITE_FIELD(this, offset, value);
3922  }
3923}
3924
3925
ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)


// Table reader without the checked cast.
ObjectHashTable* JSWeakMap::unchecked_table() {
  return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
}


// A Foreign wraps a raw address stored as an intptr field.
Address Foreign::address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
}


void Foreign::set_address(Address value) {
  WRITE_INTPTR_FIELD(this, kAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSValue, value, Object, kValueOffset)


// Hand-written cast: additionally asserts the fixed JSValue size.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
3953
3954
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Hand-written cast: additionally asserts the fixed object size.
JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
3969
3970
3971INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
3972ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
3973ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3974ACCESSORS(Code, next_code_flushing_candidate,
3975          Object, kNextCodeFlushingCandidateOffset)
3976
3977
byte* Code::instruction_start()  {
  // Instructions are stored immediately after the fixed-size header.
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  // One past the last instruction byte.
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  // Instruction area size rounded up to the object allocation alignment.
  return RoundUp(instruction_size(), kObjectAlignment);
}


FixedArray* Code::unchecked_deoptimization_data() {
  // Raw field read without the checked cast done by the regular
  // accessor; presumably required while the heap is in a state where
  // type checks are unsafe (e.g. during GC) -- confirm against callers.
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}


ByteArray* Code::unchecked_relocation_info() {
  // Raw field read of the relocation info, no checked cast (see above).
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  // First byte of relocation data stored inside the ByteArray.
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  // Number of relocation data bytes.
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  // The entry point coincides with the start of the instructions.
  return instruction_start();
}
4017
4018
4019bool Code::contains(byte* pc) {
4020  return (instruction_start() <= pc) &&
4021      (pc <= instruction_start() + instruction_size());
4022}
4023
4024
// Raw storage of the JSArray 'length' property.
ACCESSORS(JSArray, length, Object, kLengthOffset)


// JSRegExp 'data': undefined while not compiled, otherwise a FixedArray
// holding the tag, flags, source and implementation data (see below).
ACCESSORS(JSRegExp, data, Object, kDataOffset)
4029
4030
JSRegExp::Type JSRegExp::TypeTag() {
  // The regexp type tag is a Smi stored at kTagIndex of the data array;
  // undefined data means the regexp has not been compiled yet.
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  // Like TypeTag(), but without the undefined/FixedArray checks; the
  // caller must know the data array exists.
  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
4043
4044
4045int JSRegExp::CaptureCount() {
4046  switch (TypeTag()) {
4047    case ATOM:
4048      return 0;
4049    case IRREGEXP:
4050      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4051    default:
4052      UNREACHABLE();
4053      return -1;
4054  }
4055}
4056
4057
JSRegExp::Flags JSRegExp::GetFlags() {
  // The flags (global/ignoreCase/multiline) are stored as a Smi at the
  // kFlagsIndex slot of the data array.
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
4064
4065
4066String* JSRegExp::Pattern() {
4067  ASSERT(this->data()->IsFixedArray());
4068  Object* data = this->data();
4069  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
4070  return pattern;
4071}
4072
4073
Object* JSRegExp::DataAt(int index) {
  // Read a slot of the data array; only valid once compiled.
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


Object* JSRegExp::DataAtUnchecked(int index) {
  // Raw slot read bypassing the FixedArray checked cast and accessor.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  int offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(fa, offset);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    // Smis never need a write barrier.
    fa->set_unchecked(index, Smi::cast(value));
  } else {
    // NOTE(review): the write barrier is skipped for heap values too;
    // presumably callers guarantee the value needs no recording --
    // confirm against the call sites.
    fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
  }
}
4103
4104
ElementsKind JSObject::GetElementsKind() {
  // The elements kind lives on the map.  The ASSERT cross-checks that
  // the actual backing store is consistent with the claimed kind:
  //   FAST_ELEMENTS        -> FixedArray (plain or copy-on-write map)
  //   FAST_DOUBLE_ELEMENTS -> FixedDoubleArray
  //   DICTIONARY_ELEMENTS  -> a dictionary (FixedArray-based)
  // Kinds above DICTIONARY_ELEMENTS (external arrays) are not checked.
  ElementsKind kind = map()->elements_kind();
  ASSERT((kind == FAST_ELEMENTS &&
          (elements()->map() == GetHeap()->fixed_array_map() ||
           elements()->map() == GetHeap()->fixed_cow_array_map())) ||
         (kind == FAST_DOUBLE_ELEMENTS &&
          elements()->IsFixedDoubleArray()) ||
         (kind == DICTIONARY_ELEMENTS &&
          elements()->IsFixedArray() &&
          elements()->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  return kind;
}
4118
4119
ElementsAccessor* JSObject::GetElementsAccessor() {
  // One ElementsAccessor implementation exists per elements kind.
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  // True for plain FixedArray-backed elements.
  return GetElementsKind() == FAST_ELEMENTS;
}


bool JSObject::HasFastDoubleElements() {
  // True for unboxed-double-backed elements.
  return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
}


bool JSObject::HasDictionaryElements() {
  // True for slow (dictionary-mode) elements.
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  // True for any external (typed) array backing store, regardless of
  // the element type; see the per-type checks below.
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
4145
4146
// Per-type predicates for external (typed) array backing stores: the
// check compares the instance type of the elements' map with the
// expected external array type.
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4170
4171
bool JSObject::HasNamedInterceptor() {
  // Interceptor presence is tracked as a bit on the map.
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


bool JSObject::AllowsSetElementsLength() {
  // Length can only be set when the backing store is a fixed array
  // variant; external (typed) array lengths are not settable, which is
  // what the ASSERT double-checks.
  bool result = elements()->IsFixedArray() ||
      elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
4188
4189
MaybeObject* JSObject::EnsureWritableFastElements() {
  // Fast elements may share a copy-on-write backing store.  If so, make
  // a private writable copy (may fail on allocation, hence MaybeObject)
  // and install it before returning it.
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Already writable: not using the copy-on-write map.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
4206
4207
StringDictionary* JSObject::property_dictionary() {
  // Only valid for slow-mode objects: in fast mode 'properties' is a
  // plain FixedArray, not a dictionary.
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  // Only valid when the elements are in dictionary mode.
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}
4218
4219
bool String::IsHashFieldComputed(uint32_t field) {
  // The not-computed bit is clear once a hash has been stored.
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Returns the string's hash code, computing and caching it on first use.
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
4237
4238
// Incremental string hasher.  The running hash starts at the seed; a
// string can only be an array index if its length is between 1 and
// kMaxArrayIndexSize characters, so longer/empty strings start with the
// array-index path disabled.
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) {
  // A non-zero seed only makes sense with hash randomization enabled.
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  // Strings beyond kMaxHashCalcLength are too long to hash character by
  // character; callers skip the per-character loop for them.
  return length_ > String::kMaxHashCalcLength;
}
4253
4254
void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      // A non-digit character disqualifies the string as an array index.
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        // Leading zeros are only allowed for the single string "0".
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      // Overflow guard before array_index_ * 10 + d: 429496729 is
      // floor(2^32 / 10) and the ((d + 2) >> 3) term subtracts one when
      // d >= 6 -- presumably this keeps the result within the maximum
      // valid array index; confirm against kMaxArrayIndex.
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}


void StringHasher::AddCharacterNoIndex(uc32 c) {
  // Same Jenkins hash step as AddCharacter, without array-index
  // tracking; only legal once the string is known not to be an index.
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
4290
4291
4292uint32_t StringHasher::GetHash() {
4293  // Get the calculated raw hash value and do some more bit ops to distribute
4294  // the hash further. Ensure that we never return zero as the hash value.
4295  uint32_t result = raw_running_hash_;
4296  result += (result << 3);
4297  result ^= (result >> 11);
4298  result += (result << 15);
4299  if ((result & String::kHashBitMask) == 0) {
4300    result = 27;
4301  }
4302  return result;
4303}
4304
4305
// Hashes a sequential character buffer.  While the prefix still looks
// like an array index the characters go through the index-tracking
// path; once that fails, the remaining characters use the cheaper
// no-index path.  Over-long strings skip per-character hashing entirely
// (trivial hash).
template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) {
    int i;
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    }
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
    }
  }
  return hasher.GetHashField();
}
4320
4321
bool String::AsArrayIndex(uint32_t* index) {
  // Quick reject: if the hash has been computed and it says "not an
  // array index", avoid the slow parse.  Otherwise fall through to the
  // full check, which also extracts the index value.
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
4329
4330
Object* JSReceiver::GetPrototype() {
  // The prototype is stored on the map.
  return HeapObject::cast(this)->map()->prototype();
}


bool JSReceiver::HasProperty(String* name) {
  // Proxies delegate the query to their handler's trap.
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(String* name) {
  // NOTE(review): proxies take the same HasPropertyWithHandler path as
  // HasProperty, so a proxy does not distinguish local from inherited
  // properties here -- confirm this is the intended behavior.
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  // Attribute lookup starting at the receiver itself.
  return GetPropertyAttributeWithReceiver(this, key);
}
4355
4356// TODO(504): this may be useful in other places too where JSGlobalProxy
4357// is used.
Object* JSObject::BypassGlobalProxy() {
  // For a global proxy, return the real global object behind it (its
  // prototype), or undefined if the proxy has been detached.  Any other
  // object is returned unchanged.
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}
4367
4368
bool JSObject::HasHiddenPropertiesObject() {
  // Hidden properties live under a property keyed by the hidden symbol.
  // Post-interceptor lookup avoids triggering interceptors for this
  // internal query.
  ASSERT(!IsJSGlobalProxy());
  return GetPropertyAttributePostInterceptor(this,
                                             GetHeap()->hidden_symbol(),
                                             false) != ABSENT;
}


Object* JSObject::GetHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  PropertyAttributes attributes;
  // You can't install a getter on a property indexed by the hidden symbol,
  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
  // object.
  Object* result =
      GetLocalPropertyPostInterceptor(this,
                                      GetHeap()->hidden_symbol(),
                                      &attributes)->ToObjectUnchecked();
  return result;
}


MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
  // Install the hidden-properties holder under the hidden symbol as a
  // non-enumerable property; may fail on allocation.
  ASSERT(!IsJSGlobalProxy());
  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
                                    hidden_obj,
                                    DONT_ENUM,
                                    kNonStrictMode);
}


bool JSObject::HasHiddenProperties() {
  // OMIT_CREATION: query only, never allocate the hidden-properties
  // store as a side effect.
  return !GetHiddenProperties(OMIT_CREATION)->ToObjectChecked()->IsUndefined();
}


bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}
4408
4409
// AccessorInfo packs several boolean bits plus the property attributes
// into the 'flag' smi; the helpers below read/write individual pieces.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  // The attributes occupy a bit field inside the flag smi.
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}
4448
4449
// Store key/value at an entry with empty property details.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  // A string key must either be deleted or carry an enumeration index.
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  // Each entry occupies three consecutive slots: key, value, details.
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  // Compute the write barrier mode once; no allocation may happen while
  // it is in use.
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  // Details are a smi, so fast_set (no write barrier) is safe.
  FixedArray::fast_set(this, index+2, details.AsSmi());
}
4471
4472
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  // Stored keys are numbers; compare after truncating to uint32.
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  // Unseeded variant: seed fixed at 0.
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  // Hash an already-stored key (a number object), ignoring 'key'.
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

// Seeded variants mix the heap's hash seed into the integer hash.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  // Convert the numeric key to a heap object; may allocate, hence
  // MaybeObject.
  return Isolate::Current()->heap()->NumberFromUint32(key);
}
4504
4505
bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  // Hashes match: fall back to a full string comparison.
  return key->Equals(String::cast(other));
}


uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}


uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  // Hash of an already-stored entry; 'key' is unused.
  return String::cast(other)->Hash();
}


MaybeObject* StringDictionaryShape::AsObject(String* key) {
  // String keys are already heap objects; no allocation needed.
  return key;
}
4527
4528
bool ObjectHashTableShape::IsMatch(JSObject* key, Object* other) {
  // Object keys compare by identity.
  return key == JSObject::cast(other);
}


uint32_t ObjectHashTableShape::Hash(JSObject* key) {
  // OMIT_CREATION plus the failure ASSERT: callers must have created
  // the identity hash before inserting the key.
  MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION);
  ASSERT(!maybe_hash->IsFailure());
  return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
}


uint32_t ObjectHashTableShape::HashForObject(JSObject* key, Object* other) {
  // Identity hash of an already-stored key; 'key' is unused.
  MaybeObject* maybe_hash = JSObject::cast(other)->GetIdentityHash(
      JSObject::OMIT_CREATION);
  ASSERT(!maybe_hash->IsFailure());
  return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
}


MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
  // Keys are already heap objects; no allocation needed.
  return key;
}


void ObjectHashTable::RemoveEntry(int entry) {
  RemoveEntry(entry, GetHeap());
}
4557
4558
void Map::ClearCodeCache(Heap* heap) {
  // Reset the code cache to the shared empty fixed array.
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
4566
4567
void JSArray::EnsureSize(int required_size) {
  // Grow the fast-elements backing store to hold at least required_size
  // elements; small arrays are also migrated back into new space.
  ASSERT(HasFastElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
4584
4585
void JSArray::set_length(Smi* length) {
  // Smi lengths never need a write barrier.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


void JSArray::SetContent(FixedArray* storage) {
  // Replace the backing store and update the length to match it.
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}


MaybeObject* FixedArray::Copy() {
  // An empty array needs no copy; may fail on allocation otherwise.
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}
4601
4602
// Relocatable objects form a per-isolate, strictly LIFO linked list
// (pushed in the constructor, popped in the destructor) so the GC can
// find and update them.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  // Destruction must happen in reverse construction order (stack
  // discipline), which the ASSERT_EQ enforces.
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
4616
4617
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  // A JSObject's size is fully determined by its map.
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  // Let the visitor see the slot holding the external address.
  v->VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
}


// Static-dispatch variant of ForeignIterateBody for template visitors.
template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
}
4634
4635
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  // Expose the slot holding the external string resource pointer.
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


// Static-dispatch variant for template visitors.
template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  // Same as the ASCII case, but for two-byte external resources.
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


// Static-dispatch variant for template visitors.
template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4664
// Address of the Object* slot at 'offset' inside 'obj'.
#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

// Visits the pointer slots of a fixed-size object body
// [start_offset, end_offset).
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
    v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


// Visits the pointer slots of a variable-size object body
// [start_offset, object_size).
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR
4684
4685
4686#undef CAST_ACCESSOR
4687#undef INT_ACCESSORS
4688#undef SMI_ACCESSORS
4689#undef ACCESSORS
4690#undef FIELD_ADDR
4691#undef READ_FIELD
4692#undef WRITE_FIELD
4693#undef WRITE_BARRIER
4694#undef CONDITIONAL_WRITE_BARRIER
4695#undef READ_MEMADDR_FIELD
4696#undef WRITE_MEMADDR_FIELD
4697#undef READ_DOUBLE_FIELD
4698#undef WRITE_DOUBLE_FIELD
4699#undef READ_INT_FIELD
4700#undef WRITE_INT_FIELD
4701#undef READ_SHORT_FIELD
4702#undef WRITE_SHORT_FIELD
4703#undef READ_BYTE_FIELD
4704#undef WRITE_BYTE_FIELD
4705
4706
4707} }  // namespace v8::internal
4708
4709#endif  // V8_OBJECTS_INL_H_
4710