objects-inl.h revision 8b112d2025046f85ef7f6be087c6129c872ebad2
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//
35#ifndef V8_OBJECTS_INL_H_
36#define V8_OBJECTS_INL_H_
37
38#include "objects.h"
39#include "contexts.h"
40#include "conversions-inl.h"
41#include "heap.h"
42#include "isolate.h"
43#include "property.h"
44#include "spaces.h"
45#include "v8memory.h"
46
47namespace v8 {
48namespace internal {
49
// Rebuild PropertyDetails from its Smi-encoded form (inverse of AsSmi()).
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


// Encode these details as a Smi so they can be stored in a tagged field.
Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


// Return a copy of these details with the deleted bit set; used to mark
// entries as deleted without physically removing them.
PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
64
65
// Defines type::cast(Object*): a downcast that is checked by an assert in
// debug builds and is a plain reinterpret_cast in release builds.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


// Getter/setter pair for a raw int stored at a fixed byte offset.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Getter/setter pair for a tagged-pointer field; the setter issues a write
// barrier unless the caller passes SKIP_WRITE_BARRIER.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);           \
  }


// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
#define ACCESSORS_GCSAFE(holder, name, type, offset)                    \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode);                \
  }


// Getter/setter pair for an int stored as a Smi in a tagged field.
// No write barrier is needed because a Smi is not a heap pointer.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


// Read-only accessor for one bit of the given bit-field member.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


// Getter/setter pair for one bit of the given bit-field member.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
118
119
120bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
121  // There is a constraint on the object; check.
122  if (!this->IsJSObject()) return false;
123  // Fetch the constructor function of the object.
124  Object* cons_obj = JSObject::cast(this)->map()->constructor();
125  if (!cons_obj->IsJSFunction()) return false;
126  JSFunction* fun = JSFunction::cast(cons_obj);
127  // Iterate through the chain of inheriting function templates to
128  // see if the required one occurs.
129  for (Object* type = fun->shared()->function_data();
130       type->IsFunctionTemplateInfo();
131       type = FunctionTemplateInfo::cast(type)->parent_template()) {
132    if (type == expected) return true;
133  }
134  // Didn't find the required type in the inheritance chain.
135  return false;
136}
137
138
// True if this value is a small integer (a tagged immediate, not a pointer).
bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


// True if this value is a tagged pointer into the heap.
bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}
147
148
149bool Object::IsHeapNumber() {
150  return Object::IsHeapObject()
151    && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
152}
153
154
155bool Object::IsString() {
156  return Object::IsHeapObject()
157    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
158}
159
160
161bool Object::IsSymbol() {
162  if (!this->IsHeapObject()) return false;
163  uint32_t type = HeapObject::cast(this)->map()->instance_type();
164  // Because the symbol tag is non-zero and no non-string types have the
165  // symbol bit set we can test for symbols with a very simple test
166  // operation.
167  ASSERT(kSymbolTag != 0);
168  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
169  return (type & kIsSymbolMask) != 0;
170}
171
172
173bool Object::IsConsString() {
174  if (!this->IsHeapObject()) return false;
175  uint32_t type = HeapObject::cast(this)->map()->instance_type();
176  return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
177         (kStringTag | kConsStringTag);
178}
179
180
181bool Object::IsSeqString() {
182  if (!IsString()) return false;
183  return StringShape(String::cast(this)).IsSequential();
184}
185
186
187bool Object::IsSeqAsciiString() {
188  if (!IsString()) return false;
189  return StringShape(String::cast(this)).IsSequential() &&
190         String::cast(this)->IsAsciiRepresentation();
191}
192
193
194bool Object::IsSeqTwoByteString() {
195  if (!IsString()) return false;
196  return StringShape(String::cast(this)).IsSequential() &&
197         String::cast(this)->IsTwoByteRepresentation();
198}
199
200
201bool Object::IsExternalString() {
202  if (!IsString()) return false;
203  return StringShape(String::cast(this)).IsExternal();
204}
205
206
207bool Object::IsExternalAsciiString() {
208  if (!IsString()) return false;
209  return StringShape(String::cast(this)).IsExternal() &&
210         String::cast(this)->IsAsciiRepresentation();
211}
212
213
214bool Object::IsExternalTwoByteString() {
215  if (!IsString()) return false;
216  return StringShape(String::cast(this)).IsExternal() &&
217         String::cast(this)->IsTwoByteRepresentation();
218}
219
220
// Captures the instance-type bits of a string so repeated shape queries do
// not have to re-read the map each time.
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Same, starting from the string's map.
StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Same, starting from a raw instance type.
StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// True if the string is a symbol; relies on the symbol tag being non-zero
// (see Object::IsSymbol for the full reasoning).
bool StringShape::IsSymbol() {
  ASSERT(valid());
  ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}
247
248
249bool String::IsAsciiRepresentation() {
250  uint32_t type = map()->instance_type();
251  return (type & kStringEncodingMask) == kAsciiStringTag;
252}
253
254
255bool String::IsTwoByteRepresentation() {
256  uint32_t type = map()->instance_type();
257  return (type & kStringEncodingMask) == kTwoByteStringTag;
258}
259
260
261bool String::HasOnlyAsciiChars() {
262  uint32_t type = map()->instance_type();
263  return (type & kStringEncodingMask) == kAsciiStringTag ||
264         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
265}
266
267
// True if the string is a cons (concatenation) string.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


// True if the string's characters live outside the V8 heap.
bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


// True if the string's characters are stored inline, sequentially.
bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


// Just the representation bits (seq/cons/external).
StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


// Representation bits combined with the encoding bit (ascii/two-byte).
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


// Keep the public API's view of the combined mask in sync with ours.
STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


// Keep the public API's external-two-byte tag in sync with ours.
STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);
320
321
322uc32 FlatStringReader::Get(int index) {
323  ASSERT(0 <= index && index <= length_);
324  if (is_ascii_) {
325    return static_cast<const byte*>(start_)[index];
326  } else {
327    return static_cast<const uc16*>(start_)[index];
328  }
329}
330
331
332bool Object::IsNumber() {
333  return IsSmi() || IsHeapNumber();
334}
335
336
337bool Object::IsByteArray() {
338  return Object::IsHeapObject()
339    && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
340}
341
342
343bool Object::IsExternalPixelArray() {
344  return Object::IsHeapObject() &&
345      HeapObject::cast(this)->map()->instance_type() ==
346          EXTERNAL_PIXEL_ARRAY_TYPE;
347}
348
349
350bool Object::IsExternalArray() {
351  if (!Object::IsHeapObject())
352    return false;
353  InstanceType instance_type =
354      HeapObject::cast(this)->map()->instance_type();
355  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
356          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
357}
358
359
360bool Object::IsExternalByteArray() {
361  return Object::IsHeapObject() &&
362      HeapObject::cast(this)->map()->instance_type() ==
363      EXTERNAL_BYTE_ARRAY_TYPE;
364}
365
366
367bool Object::IsExternalUnsignedByteArray() {
368  return Object::IsHeapObject() &&
369      HeapObject::cast(this)->map()->instance_type() ==
370      EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
371}
372
373
374bool Object::IsExternalShortArray() {
375  return Object::IsHeapObject() &&
376      HeapObject::cast(this)->map()->instance_type() ==
377      EXTERNAL_SHORT_ARRAY_TYPE;
378}
379
380
381bool Object::IsExternalUnsignedShortArray() {
382  return Object::IsHeapObject() &&
383      HeapObject::cast(this)->map()->instance_type() ==
384      EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
385}
386
387
388bool Object::IsExternalIntArray() {
389  return Object::IsHeapObject() &&
390      HeapObject::cast(this)->map()->instance_type() ==
391      EXTERNAL_INT_ARRAY_TYPE;
392}
393
394
395bool Object::IsExternalUnsignedIntArray() {
396  return Object::IsHeapObject() &&
397      HeapObject::cast(this)->map()->instance_type() ==
398      EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
399}
400
401
402bool Object::IsExternalFloatArray() {
403  return Object::IsHeapObject() &&
404      HeapObject::cast(this)->map()->instance_type() ==
405      EXTERNAL_FLOAT_ARRAY_TYPE;
406}
407
408
// True if this MaybeObject carries a failure tag instead of an object.
bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


// True if this is a failure asking the caller to retry after a GC.
bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


// True if this is the out-of-memory failure.
bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


// True if this is the pending-exception sentinel failure.
bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


// True if this is a real object and that object is the hole value.
bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


// Downcast from MaybeObject to Failure; the tag is checked in debug builds.
Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}
440
441
// True for any JS receiver; JS object types are at the top of the
// instance-type range, so a single comparison suffices.
bool Object::IsJSObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


// True for the special object holding a `with`/context extension.
bool Object::IsJSContextExtensionObject() {
  return IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_CONTEXT_EXTENSION_OBJECT_TYPE);
}


// True if this is a Map (hidden class).
bool Object::IsMap() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
}


bool Object::IsFixedArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
}


// Descriptor arrays are plain fixed arrays; there is no distinguishing map.
bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


// Heuristic check only, suitable for asserts; see comment below.
bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


// Heuristic check only, suitable for asserts; see comment below.
bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
497
498
499bool Object::IsContext() {
500  if (Object::IsHeapObject()) {
501    Heap* heap = HeapObject::cast(this)->GetHeap();
502    return (HeapObject::cast(this)->map() == heap->context_map() ||
503            HeapObject::cast(this)->map() == heap->catch_context_map() ||
504            HeapObject::cast(this)->map() == heap->global_context_map());
505  }
506  return false;
507}
508
509
510bool Object::IsCatchContext() {
511  return Object::IsHeapObject() &&
512      HeapObject::cast(this)->map() ==
513      HeapObject::cast(this)->GetHeap()->catch_context_map();
514}
515
516
517bool Object::IsGlobalContext() {
518  return Object::IsHeapObject() &&
519      HeapObject::cast(this)->map() ==
520      HeapObject::cast(this)->GetHeap()->global_context_map();
521}
522
523
bool Object::IsJSFunction() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
}


// Specialization for the generic Is<T> template predicate.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


bool Object::IsCode() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
}


// Oddballs: undefined, null, true, false, the hole, etc.
bool Object::IsOddball() {
  ASSERT(HEAP->is_safe_to_read_maps());
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
}


bool Object::IsJSGlobalPropertyCell() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type()
      == JS_GLOBAL_PROPERTY_CELL_TYPE;
}


bool Object::IsSharedFunctionInfo() {
  return Object::IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       SHARED_FUNCTION_INFO_TYPE);
}


// JSValue: wrapper object around a primitive (e.g. new String("x")).
bool Object::IsJSValue() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
}


bool Object::IsJSMessageObject() {
  return Object::IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_MESSAGE_OBJECT_TYPE);
}


// A JSValue whose wrapped primitive is a string.
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


bool Object::IsProxy() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE;
}


// True for the true/false oddballs only.
bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


bool Object::IsJSArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
}


bool Object::IsJSRegExp() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
}


// Specialization for the generic Is<T> template predicate.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
607
608
// Hash tables are identified by a dedicated map.
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// Any hash table except the heap's symbol table is treated as a dictionary.
bool Object::IsDictionary() {
  return IsHashTable() && this !=
         HeapObject::cast(this)->GetHeap()->symbol_table();
}


// True only for the heap's one symbol table instance.
bool Object::IsSymbolTable() {
  return IsHashTable() && this ==
         HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}


// Heuristic check: a fixed array of plausible layout for a function result
// cache (header plus whole entries); verified more thoroughly in debug mode.
bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
#endif
  return true;
}


// Heuristic check: a fixed array of exactly the cache size; verified more
// thoroughly in debug mode.
bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
#endif
  return true;
}


// The specialized table types below share the generic hash table map, so
// these checks cannot be more precise than IsHashTable().
bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


// Primitives: oddballs (undefined/null/booleans/...), numbers, strings.
bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}
674
675
// The proxy object standing in for the global object; always requires an
// access check (asserted below).
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


// Either the JS global object or the builtins object.
bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


bool Object::IsJSGlobalObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_GLOBAL_OBJECT_TYPE);
}


bool Object::IsJSBuiltinsObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_BUILTINS_OBJECT_TYPE);
}


// Undetectable objects masquerade as undefined in typeof/equality checks;
// the property lives on the map.
bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


// True if property access on this object must go through an access check.
bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_access_check_needed();
}


// True for any of the internal struct types listed in STRUCT_LIST.
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}
729
730
// Generates an Is##Name() predicate for every struct type in STRUCT_LIST.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
738
739
// Each singleton oddball is identified by its kind field.
bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


// Sentinel marking elided arguments objects in optimized frames.
bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
768
769
// Numeric value of this object; caller must ensure IsNumber() first.
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


// Converts to a Smi if the value is an integer in Smi range; otherwise
// returns the exception failure sentinel.
MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    // Round-trip check: the double must be exactly an int in Smi range.
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}
789
790
// True if this is a JSObject whose class name equals |name|.
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(HEAP->IsAllocationAllowed());
  return GetElementWithReceiver(this, index);
}


// Like GetElement, but the caller guarantees no exception can be thrown;
// the guarantee is only asserted in debug builds.
Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


// Property lookup starting at this object; attributes are discarded.
MaybeObject* Object::GetProperty(String* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


// Property lookup starting at this object; attributes are returned through
// the out parameter.
MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}
823
824
// Raw address of a field: the tagged pointer minus the heap-object tag,
// plus the byte offset.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Read a tagged-pointer field at the given byte offset.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

// Write a tagged-pointer field; callers are responsible for any needed
// write barrier (see CONDITIONAL_WRITE_BARRIER below).
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
833
// TODO(isolates): Pass heap in to these macros.
// Unconditionally record a pointer write at |offset| into |object| so the
// GC sees the new reference.
// NOTE(review): this macro expands with a trailing ';' and is therefore not
// usable as a single statement in an unbraced if/else; kept as-is because
// call sites outside this file may rely on the existing expansion.
#define WRITE_BARRIER(object, offset) \
  object->GetHeap()->RecordWrite(object->address(), offset);

// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.
// Wrapped in do { } while (false) so the macro behaves as exactly one
// statement and is safe inside unbraced if/else at call sites.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode)   \
  do {                                                          \
    if (mode == UPDATE_WRITE_BARRIER) {                         \
      heap->RecordWrite(object->address(), offset);             \
    } else {                                                    \
      ASSERT(mode == SKIP_WRITE_BARRIER);                       \
      ASSERT(heap->InNewSpace(object) ||                        \
             !heap->InNewSpace(READ_FIELD(object, offset)) ||   \
             Page::FromAddress(object->address())->             \
                 IsRegionDirty(object->address() + offset));    \
    }                                                           \
  } while (false)
850
#ifndef V8_TARGET_ARCH_MIPS
  // Read a double field directly.
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(HeapNumber* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    // Assemble the double from two 32-bit loads.
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS


#ifndef V8_TARGET_ARCH_MIPS
  // Write a double field directly.
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(HeapNumber* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    // Store the double as two 32-bit writes.
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


// Raw-typed field accessors; no write barriers are issued because none of
// these store tagged pointers.
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
920
921
// Address of the tagged slot at |byte_offset| inside |obj|, for code that
// iterates object fields directly.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


// Untagged integer value of this Smi.
int Smi::value() {
  return Internals::SmiValue(this);
}


// Tag an int as a Smi; |value| must already be in Smi range.
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}


// Same as FromInt but starting from an intptr_t-sized value.
Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
945}
946
947
// Failure kind, stored in the low bits of the untagged value.
Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


// For RETRY_AFTER_GC failures: which space needs to be collected.
AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


// Untagged payload: strip the failure tag bits off the pointer encoding.
intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


// Default retry failure targets new space.
Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


// Pack type and payload into a failure-tagged fake pointer; the assert
// checks that no payload bits are lost to the tag shift.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}
1008
1009
// True if |value| fits in a Smi on the current architecture.
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  // Reference computation, only used to cross-check the fast path below.
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as an tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
1035
1036
// Wrap a map pointer as a MapWord (the first word of every HeapObject).
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


// Reinterpret the stored word as a map pointer.
Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// During scavenge the map slot holds a forwarding address instead of a
// map; forwarding addresses are distinguishable because they carry the
// smi tag (maps are heap-object tagged).
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


// Encode 'object' as a forwarding address by stripping the heap-object
// tag, which leaves a smi-tagged (aligned) address.
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


// Decode the forwarding address stored by FromForwardingAddress above.
HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1062
1063
// NOTE: the mark bit has inverted sense — a CLEARED bit means marked
// (SetMark clears the bit, ClearMark sets it). The overflow bit uses
// the conventional sense.
bool MapWord::IsMarked() {
  return (value_ & kMarkingMask) == 0;
}


// Mark the object by clearing the marking bit.
void MapWord::SetMark() {
  value_ &= ~kMarkingMask;
}


// Unmark the object by setting the marking bit.
void MapWord::ClearMark() {
  value_ |= kMarkingMask;
}


// True if the object overflowed the marking stack during marking.
bool MapWord::IsOverflowed() {
  return (value_ & kOverflowMask) != 0;
}


void MapWord::SetOverflow() {
  value_ |= kOverflowMask;
}


void MapWord::ClearOverflow() {
  value_ &= ~kOverflowMask;
}
1092
1093
// Pack a map address plus a live-byte offset into a single map word for
// the compacting collector: [ compact_offset | map_page_offset |
// map_page_index ], decoded by DecodeMapAddress/DecodeOffset below.
MapWord MapWord::EncodeAddress(Address map_address, int offset) {
  // Offset is the distance in live bytes from the first live object in the
  // same page. The offset between two objects in the same page should not
  // exceed the object area size of a page.
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);

  // Objects are aligned, so the low bits of the offset are redundant.
  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));

  Page* map_page = Page::FromAddress(map_address);
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);

  // Maps are aligned, so the low bits of the in-page offset are redundant.
  uintptr_t map_page_offset =
      map_page->Offset(map_address) >> kMapAlignmentBits;

  uintptr_t encoding =
      (compact_offset << kForwardingOffsetShift) |
      (map_page_offset << kMapPageOffsetShift) |
      (map_page->mc_page_index << kMapPageIndexShift);
  return MapWord(encoding);
}


// Recover the map address encoded by EncodeAddress from the page index
// and in-page offset fields.
Address MapWord::DecodeMapAddress(MapSpace* map_space) {
  int map_page_index =
      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
  ASSERT_MAP_PAGE_INDEX(map_page_index);

  // Undo the kMapAlignmentBits compression applied when encoding.
  int map_page_offset = static_cast<int>(
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
      kMapAlignmentBits);

  return (map_space->PageAddress(map_page_index) + map_page_offset);
}


// Recover the live-byte offset encoded by EncodeAddress.
int MapWord::DecodeOffset() {
  // The offset field is represented in the kForwardingOffsetBits
  // most-significant bits.
  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
  return static_cast<int>(offset);
}


// Wrap an already-encoded address as a MapWord without re-encoding.
MapWord MapWord::FromEncodedAddress(Address address) {
  return MapWord(reinterpret_cast<uintptr_t>(address));
}


// Return the raw encoded word as an address (inverse of FromEncodedAddress).
Address MapWord::ToEncodedAddress() {
  return reinterpret_cast<Address>(value_);
}
1147
1148
#ifdef DEBUG
// Debug-only check that the pointer stored at 'offset' is a valid object.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

// Debug-only check that the field at 'offset' holds a smi.
void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif
1158
1159
// Retrieve the owning Heap via the object's map. Unsafe during GC,
// when the map slot may hold a forwarding address or mark bits.
Heap* HeapObject::GetHeap() {
  // During GC, the map pointer in HeapObject is used in various ways that
  // prevent us from retrieving Heap from the map.
  // Assert that we are not in GC, implement GC code in a way that it doesn't
  // pull heap from the map.
  ASSERT(HEAP->is_safe_to_read_maps());
  return map()->heap();
}


// Convenience: the isolate that owns this object's heap.
Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


// The object's map, read from the first word.
Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// Raw read of the first word; may be a map, forwarding address, or
// mark-encoded word depending on GC phase.
MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Convert an aligned raw address into a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


// Inverse of FromAddress: strip the tag to get the raw address.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


// Size in bytes, derived from the instance type/map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1211
1212
// Visit all pointer fields in the half-open byte range [start, end).
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


// Visit the single pointer field at 'offset'.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


// Marking state is stored in the object's map word (see MapWord::IsMarked
// for the inverted bit sense).
bool HeapObject::IsMarked() {
  return map_word().IsMarked();
}


// Read-modify-write of the map word to set the mark bit.
void HeapObject::SetMark() {
  ASSERT(!IsMarked());
  MapWord first_word = map_word();
  first_word.SetMark();
  set_map_word(first_word);
}


void HeapObject::ClearMark() {
  ASSERT(IsMarked());
  MapWord first_word = map_word();
  first_word.ClearMark();
  set_map_word(first_word);
}


bool HeapObject::IsOverflowed() {
  return map_word().IsOverflowed();
}


// No ASSERT(!IsOverflowed()) here: setting the overflow bit is
// permitted regardless of its current state.
void HeapObject::SetOverflow() {
  MapWord first_word = map_word();
  first_word.SetOverflow();
  set_map_word(first_word);
}


void HeapObject::ClearOverflow() {
  ASSERT(IsOverflowed());
  MapWord first_word = map_word();
  first_word.ClearOverflow();
  set_map_word(first_word);
}
1263
1264
// The boxed IEEE-754 double payload of a HeapNumber.
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Extract the unbiased binary exponent from the double's bit pattern.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Nonzero iff the stored double is negative (raw sign bit, not 0/1).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1284
1285
// Generated getter/setter for the out-of-object properties backing store.
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// The elements backing store: a FixedArray (covers dictionaries too) or
// an ExternalArray.
HeapObject* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(array->IsFixedArray() || array->IsExternalArray());
  return reinterpret_cast<HeapObject*>(array);
}


// Replace the elements backing store. The map's fast-elements flag must
// agree with the kind of array installed.
void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
  ASSERT(map()->has_fast_elements() ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(value->IsFixedArray() || value->IsExternalArray());
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
}


// Install the shared empty fixed array; no write barrier needed since
// it is never in new space (asserted).
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}


// Switch back to the fast-elements map and the empty backing store.
// May fail (allocation) — propagates the MaybeObject failure.
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1330
1331
// Generated accessors for an Oddball's string/number representations.
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// The oddball kind tag (stored as a raw byte).
byte Oddball::kind() {
  return READ_BYTE_FIELD(this, kKindOffset);
}


void Oddball::set_kind(byte value) {
  WRITE_BYTE_FIELD(this, kKindOffset, value);
}


// The value held by a global property cell.
Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1356
1357
// Size of the fixed header for this object's instance type; internal
// fields and in-object properties start immediately after it.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    // NOTE(review): the two cases below return JSValue::kSize rather than
    // JSArray::kSize / JSRegExp::kSize — presumably the constants coincide;
    // confirm against objects.h before relying on this.
    case JS_ARRAY_TYPE:
      return JSValue::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
1388
1389
// Number of internal (embedder) fields: total pointer slots past the
// header, minus slots reserved for in-object properties.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


// Byte offset of the index-th internal field.
int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1423
1424
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    // Negative adjusted index => in-object property, stored at the end
    // of the instance (instance_size + negative offset).
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    // Non-negative => stored in the external properties array.
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(this, offset);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


// Byte offset of an in-object property; only valid for in-object
// indices (asserted by index < 0 after adjustment).
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
  return value;
}
1484
1485
1486
// Fill every pointer slot of a freshly allocated object with 'value'.
// No write barrier: 'value' must not live in new space (asserted).
void JSObject::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


// Fast properties are stored in a plain FixedArray; slow (dictionary)
// mode uses a Dictionary backing store.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}


// Fill every slot of a freshly allocated Struct with undefined.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1516
1517
// Convert this object to an array index if it is a non-negative smi or
// a heap number holding an exact uint32 value. Returns false otherwise,
// leaving *index untouched.
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    // Round-trip check rejects negatives, fractions, NaN and out-of-range.
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
1535
1536
1537bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1538  if (!this->IsJSValue()) return false;
1539
1540  JSValue* js_value = JSValue::cast(this);
1541  if (!js_value->value()->IsString()) return false;
1542
1543  String* str = String::cast(js_value->value());
1544  if (index >= (uint32_t)str->length()) return false;
1545
1546  return true;
1547}
1548
1549
// Bounds-checked (debug only) read of element 'index'.
Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
1554
1555
1556void FixedArray::set(int index, Smi* value) {
1557  ASSERT(map() != HEAP->fixed_cow_array_map());
1558  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1559  int offset = kHeaderSize + index * kPointerSize;
1560  WRITE_FIELD(this, offset, value);
1561}
1562
1563
// Store an arbitrary object element with an unconditional write barrier.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}


// Objects in new space never need a write barrier. Takes an
// AssertNoAllocation witness: the answer is only stable while no GC
// can move this object.
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}


// Store with a caller-chosen write-barrier mode (see GetWriteBarrierMode).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
}


// Barrier-free store; only legal because 'value' is asserted not to be
// in new space.
void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
1596
1597
// Store the undefined sentinel at 'index'.
void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


// Heap-parameterized variant; no write barrier because undefined is
// asserted not to live in new space.
void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}


// Store the null sentinel at 'index'.
void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}


// Store the hole sentinel at 'index' (marks deleted/uninitialized slots).
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


// Unchecked smi store: skips the COW-map and bounds asserts.
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// Unchecked store with explicit heap and write-barrier mode.
void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
}
1649
1650
1651void FixedArray::set_null_unchecked(Heap* heap, int index) {
1652  ASSERT(index >= 0 && index < this->length());
1653  ASSERT(!HEAP->InNewSpace(heap->null_value()));
1654  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1655}
1656
1657
// Address of the first element slot, as an Object** for iteration.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


// An empty descriptor array is the canonical shared instance; any array
// with content has length > kFirstIndex.
bool DescriptorArray::IsEmpty() {
  ASSERT(this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return length() <= kFirstIndex;
}


// Swap two elements using the barrier-free fast_set (both values are
// already reachable from 'array', so no new-space pointers are created).
void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* tmp = array->get(first);
  fast_set(array, first, array->get(second));
  fast_set(array, second, tmp);
}
1675
1676
// Find the descriptor index for 'name', or kNotFound. Descriptors are
// kept sorted, enabling binary search for large arrays.
int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  // Symbols compare by identity, so linear scan is cheap for them.
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}
1693
1694
1695int DescriptorArray::SearchWithCache(String* name) {
1696  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1697  if (number == DescriptorLookupCache::kAbsent) {
1698    number = Search(name);
1699    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1700  }
1701  return number;
1702}
1703
1704
// Key (property name) of the descriptor at 'descriptor_number'.
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


// Value slot of the descriptor; values live in the content array.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


// PropertyDetails of the descriptor, stored smi-encoded.
Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}


// Field index for a FIELD descriptor, decoded from its value slot.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}


JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}


// Raw callbacks object; only valid for CALLBACKS descriptors (asserted).
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


// The callbacks object is a Proxy wrapping an AccessorDescriptor pointer.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Proxy* p = Proxy::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->proxy());
}


// Real properties sort below FIRST_PHANTOM_PROPERTY_TYPE; phantom
// entries (transitions, null descriptors) sort above it.
bool DescriptorArray::IsProperty(int descriptor_number) {
  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
}


bool DescriptorArray::IsTransition(int descriptor_number) {
  PropertyType t = GetType(descriptor_number);
  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
      t == EXTERNAL_ARRAY_TRANSITION;
}


bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}


// Copy the descriptor at 'descriptor_number' into the out-param 'desc'.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             PropertyDetails(GetDetails(descriptor_number)));
}
1779
1780
// Write 'desc' into slot 'descriptor_number'. Uses barrier-free
// fast_set, which is only safe because nothing written is in new space
// (asserted below).
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
  ASSERT(!HEAP->InNewSpace(desc->GetValue()));

  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
  FixedArray* content_array = GetContentArray();
  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
  fast_set(content_array, ToDetailsIndex(descriptor_number),
           desc->GetDetails().AsSmi());
}


// Copy a descriptor from 'src' at 'src_index' into this array at 'index'.
void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc);
}


// Swap descriptors 'first' and 'second' (key plus both content slots).
void DescriptorArray::Swap(int first, int second) {
  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
}
1810
1811
// Convenience overload using the table's own isolate.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
// Open-addressing probe: undefined marks an empty slot (stop), null
// marks a deleted slot (skip and keep probing).
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
    if (element != isolate->heap()->null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
1834
1835
// The kMaxNumberKeyIndex slot doubles as a flags word: its low bit
// (kRequiresSlowElementsMask) forces slow elements, the remaining bits
// cache the largest numeric key seen.
bool NumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

// Largest numeric key stored; only meaningful while slow elements are
// not required (asserted).
uint32_t NumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

// Set the slow-elements flag; this also discards the cached max key.
void NumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
1854
1855
1856// ------------------------------------
1857// Cast operations
1858
1859
1860CAST_ACCESSOR(FixedArray)
1861CAST_ACCESSOR(DescriptorArray)
1862CAST_ACCESSOR(DeoptimizationInputData)
1863CAST_ACCESSOR(DeoptimizationOutputData)
1864CAST_ACCESSOR(SymbolTable)
1865CAST_ACCESSOR(JSFunctionResultCache)
1866CAST_ACCESSOR(NormalizedMapCache)
1867CAST_ACCESSOR(CompilationCacheTable)
1868CAST_ACCESSOR(CodeCacheHashTable)
1869CAST_ACCESSOR(MapCache)
1870CAST_ACCESSOR(String)
1871CAST_ACCESSOR(SeqString)
1872CAST_ACCESSOR(SeqAsciiString)
1873CAST_ACCESSOR(SeqTwoByteString)
1874CAST_ACCESSOR(ConsString)
1875CAST_ACCESSOR(ExternalString)
1876CAST_ACCESSOR(ExternalAsciiString)
1877CAST_ACCESSOR(ExternalTwoByteString)
1878CAST_ACCESSOR(JSObject)
1879CAST_ACCESSOR(Smi)
1880CAST_ACCESSOR(HeapObject)
1881CAST_ACCESSOR(HeapNumber)
1882CAST_ACCESSOR(Oddball)
1883CAST_ACCESSOR(JSGlobalPropertyCell)
1884CAST_ACCESSOR(SharedFunctionInfo)
1885CAST_ACCESSOR(Map)
1886CAST_ACCESSOR(JSFunction)
1887CAST_ACCESSOR(GlobalObject)
1888CAST_ACCESSOR(JSGlobalProxy)
1889CAST_ACCESSOR(JSGlobalObject)
1890CAST_ACCESSOR(JSBuiltinsObject)
1891CAST_ACCESSOR(Code)
1892CAST_ACCESSOR(JSArray)
1893CAST_ACCESSOR(JSRegExp)
1894CAST_ACCESSOR(Proxy)
1895CAST_ACCESSOR(ByteArray)
1896CAST_ACCESSOR(ExternalArray)
1897CAST_ACCESSOR(ExternalByteArray)
1898CAST_ACCESSOR(ExternalUnsignedByteArray)
1899CAST_ACCESSOR(ExternalShortArray)
1900CAST_ACCESSOR(ExternalUnsignedShortArray)
1901CAST_ACCESSOR(ExternalIntArray)
1902CAST_ACCESSOR(ExternalUnsignedIntArray)
1903CAST_ACCESSOR(ExternalFloatArray)
1904CAST_ACCESSOR(ExternalPixelArray)
1905CAST_ACCESSOR(Struct)
1906
1907
1908#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
1909  STRUCT_LIST(MAKE_STRUCT_CAST)
1910#undef MAKE_STRUCT_CAST
1911
1912
1913template <typename Shape, typename Key>
1914HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
1915  ASSERT(obj->IsHashTable());
1916  return reinterpret_cast<HashTable*>(obj);
1917}
1918
1919
// Generated smi-encoded length accessors.
SMI_ACCESSORS(FixedArray, length, kLengthOffset)
SMI_ACCESSORS(ByteArray, length, kLengthOffset)

// External arrays store their length as a raw int, not a smi.
INT_ACCESSORS(ExternalArray, length, kLengthOffset)


SMI_ACCESSORS(String, length, kLengthOffset)


// Raw hash field: caches the computed hash and length-related flags.
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // Zero the padding word so the whole slot is deterministic on 64-bit.
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
1940
1941
// Content equality. Two distinct symbols can never be equal (symbols
// are uniquified), so that case short-circuits to false.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}


// Flatten a cons string; returns the flat string or a failure if
// allocation is needed and fails. Non-cons strings are already flat.
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  // An already-flattened cons has an empty second part.
  if (cons->second()->length() == 0) return cons->first();
  return SlowTryFlatten(pretenure);
}


// Like TryFlatten, but swallows allocation failure and returns the
// original (unflattened) string instead.
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (flat->ToObject(&successfully_flattened)) {
    return String::cast(successfully_flattened);
  }
  return this;
}
1967
1968
// Read the character at 'index', dispatching on the string's
// representation (sequential / cons / external, ASCII / two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}


// Write a character; only sequential strings are mutable (asserted).
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsAsciiRepresentation()
      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}
2000
2001
2002bool String::IsFlat() {
2003  switch (StringShape(this).representation_tag()) {
2004    case kConsStringTag: {
2005      String* second = ConsString::cast(this)->second();
2006      // Only flattened strings have second part empty.
2007      return second->length() == 0;
2008    }
2009    default:
2010      return true;
2011  }
2012}
2013
2014
// Read one ASCII character (widened to uint16_t).
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Write one character; must fit in the ASCII range (asserted).
void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (characters follow the header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
2041
2042
2043uc16* SeqTwoByteString::GetChars() {
2044  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2045}
2046
2047
// Read one UTF-16 code unit.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Total object size; instance_type is unused here (size depends only
// on length for sequential strings).
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
2068
2069
// ConsString field accessors: a cons string stores two halves, |first| and
// |second|.  The setters take a WriteBarrierMode so callers can skip the
// write barrier when they know it is not needed.

String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Reads the raw field without the String::cast type check.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


// Reads the raw field without the String::cast type check.
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
}
2100
2101
// External strings keep a raw (untagged) pointer to an embedder-supplied
// Resource object.  Since the stored value is not a heap object, plain
// pointer reads/writes are used and no write barrier is needed.

ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalAsciiString::set_resource(
    ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}


ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::set_resource(
    ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
2122
2123
// JSFunctionResultCache bookkeeping.  The cache stores its size and finger
// position as Smis in the slots before kEntriesIndex; cached entries follow
// from kEntriesIndex onwards.

void JSFunctionResultCache::MakeZeroSize() {
  // An empty cache has both its size and its finger at the first entry slot.
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  // Overwrite all used entry slots with the hole before resetting the size.
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
2158
2159
// Byte-granular element access; length() is measured in bytes.

byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
2170
2171
2172int ByteArray::get_int(int index) {
2173  ASSERT(index >= 0 && (index * kIntSize) < this->length());
2174  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2175}
2176
2177
// Converts between a ByteArray's payload address and its tagged object
// pointer by undoing/redoing the header offset and the heap-object tag.
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
2187
2188
// Pixel arrays store uint8_t elements in an out-of-heap buffer reached
// through external_pointer(); bounds are only checked in debug builds.

uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalPixelArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}


void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}
2206
2207
// The pointer to the out-of-heap backing store, stored as an untagged
// intptr field.

void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  // The value is not a heap object, so no write barrier is issued; the mode
  // parameter is accepted for interface uniformity but ignored.
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
2218
2219
// Element accessors for the external (out-of-heap) typed arrays.  Each
// getter/setter indexes the backing buffer obtained via external_pointer(),
// cast to the array's element type; bounds are only checked by the ASSERTs
// in debug builds.

int8_t ExternalByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUnsignedByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUnsignedShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}


int32_t ExternalIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUnsignedIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloatArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
2316
2317
// Map properties packed into single bytes of the map object.

int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  // Must fit in one byte.
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


int Map::instance_size() {
  // The size is stored scaled down by the pointer size so it fits in a byte.
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
2342
2343
// Returns this object's size in bytes.  Fixed-size maps encode the size
// directly; variable-sized objects (signalled by kVariableSizeSentinel)
// compute it from their length, with the most common types inlined here.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // We can ignore the "symbol" bit because it is only set for symbols
  // and implies a string type.
  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
  // Only inline the most frequent cases.
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE) {
    return SeqAsciiString::SizeFor(
        reinterpret_cast<SeqAsciiString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  // Code objects are the only remaining variable-sized type handled here.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
2368
2369
void Map::set_instance_size(int value) {
  // The size must be pointer-aligned; it is stored scaled down by the
  // pointer size so that it fits in a single byte.
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  // Must fit in one byte.
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  // Must fit in one byte.
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Saturates at 255 since the count is stored in a single byte.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
2410
2411
// Raw access to the two per-map flag bytes; the individual boolean flag
// accessors read-modify-write through these.

byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
2430
2431
// Single-bit flag accessors over the map's two flag bytes: each setter ORs
// in or masks out one bit; each getter tests that bit.

void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}


bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
  }
}


bool Map::function_with_prototype() {
  return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}


void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsShared));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsShared));
  }
}

bool Map::is_shared() {
  return ((1 << kIsShared) & bit_field2()) != 0;
}
2511
2512
// Reads the constructor field using a reinterpret_cast instead of the
// checked JSFunction cast.
JSFunction* Map::unchecked_constructor() {
  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}
2516
2517
// The packed flags word of a code object; see ComputeFlags for the layout.

Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Code::set_flags(Code::Flags flags) {
  // The kind field must be wide enough for every Kind value.
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= (kFlagsKindMask >> kFlagsKindShift)+1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
2531
2532
// Convenience accessors decoding individual fields from flags().

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InLoopFlag Code::ic_in_loop() {
  return ExtractICInLoopFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}


Code::ExtraICState Code::extra_ic_state() {
  // Extra IC state is only meaningful for inline cache stubs.
  ASSERT(is_inline_cache_stub());
  return ExtractExtraICStateFromFlags(flags());
}


PropertyType Code::type() {
  // The type field is only valid for monomorphic ICs.
  ASSERT(ic_state() == MONOMORPHIC);
  return ExtractTypeFromFlags(flags());
}


int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}
2572
2573
// Per-kind byte fields; the ASSERTs document which code kinds each field is
// valid for.

int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == TYPE_RECORDING_BINARY_OP_IC ||
         kind() == COMPARE_IC);
  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == TYPE_RECORDING_BINARY_OP_IC ||
         kind() == COMPARE_IC);
  ASSERT(0 <= major && major < 256);  // Must fit in one byte.
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}


bool Code::optimizable() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


bool Code::has_deoptimization_support() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kHasDeoptimizationSupportOffset) == 1;
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kHasDeoptimizationSupportOffset, value ? 1 : 0);
}
2613
2614
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT(kind() == FUNCTION);
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


// Stack slot count is only tracked for optimized code.
unsigned Code::stack_slots() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kStackSlotsOffset);
}


void Code::set_stack_slots(unsigned slots) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
}


// Offset of the safepoint table within the code object (optimized code only).
unsigned Code::safepoint_table_offset() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
}


void Code::set_safepoint_table_offset(unsigned offset) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}


// Offset of the stack check table within the code object (unoptimized
// functions only).
unsigned Code::stack_check_table_offset() {
  ASSERT(kind() == FUNCTION);
  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}


void Code::set_stack_check_table_offset(unsigned offset) {
  ASSERT(kind() == FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
2664
2665
// Stub-specific byte fields; each pair of accessors is only valid for the
// code kinds named in its ASSERT.

CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}


ExternalArrayType Code::external_array_type() {
  ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
  byte type = READ_BYTE_FIELD(this, kExternalArrayTypeOffset);
  return static_cast<ExternalArrayType>(type);
}


void Code::set_external_array_type(ExternalArrayType value) {
  ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
  WRITE_BYTE_FIELD(this, kExternalArrayTypeOffset, value);
}


byte Code::type_recording_binary_op_type() {
  ASSERT(is_type_recording_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_type_recording_binary_op_type(byte value) {
  ASSERT(is_type_recording_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


byte Code::type_recording_binary_op_result_type() {
  ASSERT(is_type_recording_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
}


void Code::set_type_recording_binary_op_result_type(byte value) {
  ASSERT(is_type_recording_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
}


byte Code::compare_state() {
  ASSERT(is_compare_ic_stub());
  return READ_BYTE_FIELD(this, kCompareStateOffset);
}


void Code::set_compare_state(byte value) {
  ASSERT(is_compare_ic_stub());
  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}
2726
2727
2728bool Code::is_inline_cache_stub() {
2729  Kind kind = this->kind();
2730  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
2731}
2732
2733
// Packs all code properties into a single flags word.  The trailing ASSERTs
// verify that no field overflowed into a neighbouring bit range.
Code::Flags Code::ComputeFlags(Kind kind,
                               InLoopFlag in_loop,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for monomorphic call IC stubs
  // or for store IC stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         (kind == CALL_IC && (ic_state == MONOMORPHIC ||
                              ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
         (kind == STORE_IC) ||
         (kind == KEYED_STORE_IC));
  // Compute the bit mask.
  int bits = kind << kFlagsKindShift;
  if (in_loop) bits |= kFlagsICInLoopMask;
  bits |= ic_state << kFlagsICStateShift;
  bits |= type << kFlagsTypeShift;
  bits |= extra_ic_state << kFlagsExtraICStateShift;
  bits |= argc << kFlagsArgumentsCountShift;
  if (holder == PROTOTYPE_MAP) bits |= kFlagsCacheInPrototypeMapMask;
  // Cast to flags and validate result before returning it.
  Flags result = static_cast<Flags>(bits);
  ASSERT(ExtractKindFromFlags(result) == kind);
  ASSERT(ExtractICStateFromFlags(result) == ic_state);
  ASSERT(ExtractICInLoopFromFlags(result) == in_loop);
  ASSERT(ExtractTypeFromFlags(result) == type);
  ASSERT(ExtractExtraICStateFromFlags(result) == extra_ic_state);
  ASSERT(ExtractArgumentsCountFromFlags(result) == argc);
  return result;
}
2766
2767
// Shorthand for ComputeFlags with a MONOMORPHIC IC state.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          InLoopFlag in_loop,
                                          int argc) {
  return ComputeFlags(
      kind, in_loop, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
2777
2778
2779Code::Kind Code::ExtractKindFromFlags(Flags flags) {
2780  int bits = (flags & kFlagsKindMask) >> kFlagsKindShift;
2781  return static_cast<Kind>(bits);
2782}
2783
2784
// Decoders for the individual fields packed by ComputeFlags.

InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  int bits = (flags & kFlagsICStateMask) >> kFlagsICStateShift;
  return static_cast<InlineCacheState>(bits);
}


Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  int bits = (flags & kFlagsExtraICStateMask) >> kFlagsExtraICStateShift;
  return static_cast<ExtraICState>(bits);
}


InLoopFlag Code::ExtractICInLoopFromFlags(Flags flags) {
  // Single-bit field; no shift needed, just a zero test.
  int bits = (flags & kFlagsICInLoopMask);
  return bits != 0 ? IN_LOOP : NOT_IN_LOOP;
}


PropertyType Code::ExtractTypeFromFlags(Flags flags) {
  int bits = (flags & kFlagsTypeMask) >> kFlagsTypeShift;
  return static_cast<PropertyType>(bits);
}


int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kFlagsArgumentsCountMask) >> kFlagsArgumentsCountShift;
}


InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  // Single-bit field; no shift needed, just a zero test.
  int bits = (flags & kFlagsCacheInPrototypeMapMask);
  return bits != 0 ? PROTOTYPE_MAP : OWN_MAP;
}


// Returns the flags with the type field cleared.
Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~kFlagsTypeMask;
  return static_cast<Flags>(bits);
}
2824
2825
// Maps a call target address (the start of the instructions) back to the
// Code object that contains it.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
2835
2836
// Heap/isolate lookup helpers: the owning heap is recovered from the page
// the object resides on, avoiding the need to thread an isolate through.

Isolate* Map::isolate() {
  return heap()->isolate();
}


Heap* Map::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Heap* Code::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* Code::isolate() {
  return heap()->isolate();
}


Heap* JSGlobalPropertyCell::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* JSGlobalPropertyCell::isolate() {
  return heap()->isolate();
}
2877
2878
// Reads the code entry address stored at |location_of_address| and converts
// it back to the Code object it points into.
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
2883
2884
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  // Prototypes are either JS objects or null.
  ASSERT(value->IsNull() || value->IsJSObject());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
}
2895
2896
// Returns a map identical to this one except with the fast-elements bit set,
// copying the map (dropping transitions) when needed.  Allocation of the
// copy can fail, hence the MaybeObject return.
MaybeObject* Map::GetFastElementsMap() {
  if (has_fast_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_has_fast_elements(true);
  isolate()->counters()->map_slow_to_fast_elements()->Increment();
  return new_map;
}


// Counterpart of GetFastElementsMap: produces a map with the fast-elements
// bit cleared.
MaybeObject* Map::GetSlowElementsMap() {
  if (!has_fast_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_has_fast_elements(false);
  isolate()->counters()->map_fast_to_slow_elements()->Increment();
  return new_map;
}
2921
2922
// Macro-generated getter/setter pairs: each ACCESSORS line declares a typed
// accessor for the tagged field at the given offset of the named class.

ACCESSORS(Map, instance_descriptors, DescriptorArray,
          kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
                 kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
          kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Proxy, kWrapperOffset)
ACCESSORS(Script, type, Smi, kTypeOffset)
ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS(Script, eval_from_instructions_offset, Smi,
          kEvalFrominstructionsOffsetOffset)
3006
#ifdef ENABLE_DEBUGGER_SUPPORT
// Debugger bookkeeping objects; only compiled in when debugger support is
// enabled.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
3018
// SharedFunctionInfo field accessors.
// NOTE(review): the _GCSAFE variants appear intended for use while GC may be
// in progress -- confirm against the ACCESSORS_GCSAFE macro definition.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
          kThisPropertyAssignmentsOffset)

// Boolean flag accessors generated from single bits (the kXxxBit constants)
// of the named field.
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo, compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
3047
3048
#if V8_HOST_ARCH_32_BIT
// On 32-bit hosts each of these int fields occupies a full pointer-sized
// slot, so plain smi accessors can be used directly.
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
              kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
#else
3067
// On 64-bit hosts two int-sized "pseudo smi" fields share one pointer-sized
// slot.  The LO half lives at the aligned offset; its value is stored
// shifted left by one with the low bit (the heap-object tag) clear,
// presumably so the GC never mistakes the slot's contents for a pointer
// -- confirm against the heap layout documentation.
//
// Fix: the second mask literal had nine hex digits (0x000000000); the value
// was still zero, but the literal is normalized to eight digits here.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    /* Value must fit in 31 bits: its top two bits must agree. */ \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x00000000);                   \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }
3084
// The HI half of a pseudo-smi pair sits in the upper (unaligned) int of the
// shared pointer-sized slot and is stored as a plain int.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


// Fields are declared in LO/HI pairs; each pair packs into one
// pointer-sized slot on 64-bit hosts.
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        this_property_assignments_count,
                        kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
#endif
3117
3118
// Number of times this function has been used as a constructor, kept in a
// single raw byte of the object.
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


// The counter is one byte wide, hence the [0, 256) range check.
void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}
3128
3129
3130bool SharedFunctionInfo::live_objects_may_exist() {
3131  return (compiler_hints() & (1 << kLiveObjectsMayExist)) != 0;
3132}
3133
3134
3135void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
3136  if (value) {
3137    set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
3138  } else {
3139    set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
3140  }
3141}
3142
3143
// In-object slack tracking is considered in progress as long as the
// initial_map slot holds anything other than undefined.
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != HEAP->undefined_value();
}


// True when optimization has been explicitly disabled for this function.
bool SharedFunctionInfo::optimization_disabled() {
  return BooleanBit::get(compiler_hints(), kOptimizationDisabled);
}
3152
3153
3154void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3155  set_compiler_hints(BooleanBit::set(compiler_hints(),
3156                                     kOptimizationDisabled,
3157                                     disable));
3158  // If disabling optimizations we reflect that in the code object so
3159  // it will not be counted as optimizable code.
3160  if ((code()->kind() == Code::FUNCTION) && disable) {
3161    code()->set_optimizable(false);
3162  }
3163}
3164
3165
// Whether this function was compiled in strict mode.
bool SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction);
}


void SharedFunctionInfo::set_strict_mode(bool value) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kStrictModeFunction,
                                     value));
}


// CodeCache: default (linear) cache plus a hash-table cache for normal-type
// lookups.
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3180
3181bool Script::HasValidSource() {
3182  Object* src = this->source();
3183  if (!src->IsString()) return true;
3184  String* src_str = String::cast(src);
3185  if (!StringShape(src_str).IsExternal()) return true;
3186  if (src_str->IsAsciiRepresentation()) {
3187    return ExternalAsciiString::cast(src)->resource() != NULL;
3188  } else if (src_str->IsTwoByteRepresentation()) {
3189    return ExternalTwoByteString::cast(src)->resource() != NULL;
3190  }
3191  return true;
3192}
3193
3194
// Marks a builtin as not needing the arguments-adaptor frame by storing a
// sentinel in the formal parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// Start position is packed into start_position_and_type above the type bits.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


// Replaces only the position bits, preserving the low type bits.
void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
3210
3211
// Checked read of the code object.
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Unchecked variant: no type assertion, for use e.g. during GC.
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
}


// NOTE(review): the WriteBarrierMode parameter is unused; the barrier is
// skipped, backed by the assertion that code never lives in new space.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
}
3226
3227
// Scope info is stored as a raw field and reinterpreted on read.
SerializedScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<SerializedScopeInfo*>(
      READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
}


// Deopt counter is a smi field; no write barrier needed for smis.
Smi* SharedFunctionInfo::deopt_counter() {
  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
}


void SharedFunctionInfo::set_deopt_counter(Smi* value) {
  WRITE_FIELD(this, kDeoptCounterOffset, value);
}


// Compiled as soon as code() is anything but the lazy-compile builtin stub.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}
3255
3256
// The function_data field is overloaded: a FunctionTemplateInfo marks an
// API function, a smi marks a builtin function id.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
3277
3278
// Code age is a small bit field packed into compiler_hints.
int SharedFunctionInfo::code_age() {
  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
}
3282
3283
3284void SharedFunctionInfo::set_code_age(int code_age) {
3285  set_compiler_hints(compiler_hints() |
3286                     ((code_age & kCodeAgeMask) << kCodeAgeShift));
3287}
3288
3289
3290bool SharedFunctionInfo::has_deoptimization_support() {
3291  Code* code = this->code();
3292  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3293}
3294
3295
// A function is a builtin when its context's global object is the builtins
// object.
bool JSFunction::IsBuiltin() {
  return context()->global()->IsJSBuiltinsObject();
}


// Needs the arguments adaptor unless the sentinel parameter count was set
// (see SharedFunctionInfo::DontAdaptArguments).
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


// Optimizable: unoptimized function code whose optimizable bit is set.
bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


// Marked for lazy recompilation when code() is the lazy-recompile stub.
bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
3320
3321
Code* JSFunction::code() {
  return Code::cast(unchecked_code());
}


// The function stores the code *entry address*, not the code object; the
// object is recovered from the entry address.
Code* JSFunction::unchecked_code() {
  return reinterpret_cast<Code*>(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  // Skip the write barrier because code is never in new space.
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
3339
3340
3341void JSFunction::ReplaceCode(Code* code) {
3342  bool was_optimized = IsOptimized();
3343  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3344
3345  set_code(code);
3346
3347  // Add/remove the function from the list of optimized functions for this
3348  // context based on the state change.
3349  if (!was_optimized && is_optimized) {
3350    context()->global_context()->AddOptimizedFunction(this);
3351  }
3352  if (was_optimized && !is_optimized) {
3353    context()->global_context()->RemoveOptimizedFunction(this);
3354  }
3355}
3356
3357
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Unchecked variant: returns the raw field without a Context type check.
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


// Undefined is allowed as a transient value during function setup.
void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(this, kContextOffset);
}
3379
// One slot does double duty: it holds either the initial map (once one has
// been created) or the explicit prototype object.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


// Only valid when has_initial_map() is true.
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


// The slot holds an initial map exactly when its contents is a Map.
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}
3397
3398
// The hole marks "no prototype yet"; anything else (map or object) counts.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


// Non-instance prototypes (non-JSObject values) are tracked on the map.
bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

// Whether this function kind carries a "prototype" property at all.
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// Compiled as soon as code() is anything but the lazy-compile stub.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
3434
3435
// Size of this function's literals array.
int JSFunction::NumberOfLiterals() {
  return literals()->length();
}
3439
3440
// JavaScript builtins live in fixed per-id slots of the builtins object.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// Unlike the function setter above, the write barrier is skipped here,
// backed by the assertion that code objects never live in new space.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}
3467
3468
// A Proxy wraps a raw address; it is stored untagged as an intptr field.
Address Proxy::proxy() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset));
}


void Proxy::set_proxy(Address value) {
  WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value));
}


ACCESSORS(JSValue, value, Object, kValueOffset)
3480
3481
// Checked downcast; debug builds also verify the fixed instance size.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}


// JSMessageObject field accessors.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
3496
3497
// Checked downcast; debug builds also verify the fixed instance size.
JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}


// Code object fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3508
3509
// Instructions begin immediately after the Code object header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction size rounded up to the heap's object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Unchecked read, usable while the heap is in a non-iterable state.
FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}
3529
3530
// Unchecked read, usable while the heap is in a non-iterable state.
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


// The entry point coincides with the first instruction.
byte* Code::entry() {
  return instruction_start();
}
3549
3550
3551bool Code::contains(byte* pc) {
3552  return (instruction_start() <= pc) &&
3553      (pc <= instruction_start() + instruction_size());
3554}
3555
3556
ACCESSORS(JSArray, length, Object, kLengthOffset)


// JSRegExp's data is either undefined (not compiled) or a FixedArray.
ACCESSORS(JSRegExp, data, Object, kDataOffset)
3561
3562
3563JSRegExp::Type JSRegExp::TypeTag() {
3564  Object* data = this->data();
3565  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
3566  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
3567  return static_cast<JSRegExp::Type>(smi->value());
3568}
3569
3570
3571int JSRegExp::CaptureCount() {
3572  switch (TypeTag()) {
3573    case ATOM:
3574      return 0;
3575    case IRREGEXP:
3576      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
3577    default:
3578      UNREACHABLE();
3579      return -1;
3580  }
3581}
3582
3583
// Flags (global/ignore-case/multiline) stored as a smi in the data array.
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
3590
3591
3592String* JSRegExp::Pattern() {
3593  ASSERT(this->data()->IsFixedArray());
3594  Object* data = this->data();
3595  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
3596  return pattern;
3597}
3598
3599
// Raw access to the data array; only valid once the regexp is compiled.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
3611
3612
// Derives the elements kind from the map's fast-elements bit and the
// concrete type of the elements backing store.
JSObject::ElementsKind JSObject::GetElementsKind() {
  if (map()->has_fast_elements()) {
    // Fast elements use a FixedArray (possibly the copy-on-write variant).
    ASSERT(elements()->map() == GetHeap()->fixed_array_map() ||
           elements()->map() == GetHeap()->fixed_cow_array_map());
    return FAST_ELEMENTS;
  }
  HeapObject* array = elements();
  if (array->IsFixedArray()) {
    // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a
    // FixedArray, but FAST_ELEMENTS is already handled above.
    ASSERT(array->IsDictionary());
    return DICTIONARY_ELEMENTS;
  }
  ASSERT(!map()->has_fast_elements());
  // External arrays: map the backing store's instance type to the kind.
  if (array->IsExternalArray()) {
    switch (array->map()->instance_type()) {
      case EXTERNAL_BYTE_ARRAY_TYPE:
        return EXTERNAL_BYTE_ELEMENTS;
      case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
      case EXTERNAL_SHORT_ARRAY_TYPE:
        return EXTERNAL_SHORT_ELEMENTS;
      case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_SHORT_ELEMENTS;
      case EXTERNAL_INT_ARRAY_TYPE:
        return EXTERNAL_INT_ELEMENTS;
      case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_INT_ELEMENTS;
      case EXTERNAL_PIXEL_ARRAY_TYPE:
        return EXTERNAL_PIXEL_ELEMENTS;
      default:
        break;
    }
  }
  // Anything left must be the one remaining external type: float.
  ASSERT(array->map()->instance_type() == EXTERNAL_FLOAT_ARRAY_TYPE);
  return EXTERNAL_FLOAT_ELEMENTS;
}
3650
3651
bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


// Covers every external array kind, regardless of element type.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
3666}
3667
3668
// Generates a per-type HasExternal<T>Elements predicate that matches the
// backing store's map instance type.
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
3690
3691
// Interceptor flags live on the map.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// Length is only settable on FixedArray-backed elements, i.e. not on
// external arrays.
bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
3707
3708
// If the fast elements are a copy-on-write array, replaces them with a
// writable copy.  Returns the (possibly new) elements array, or a
// retry-after-GC failure from the copy allocation.
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Already writable: nothing to do.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
3725
3726
// Slow-mode property backing store; only valid without fast properties.
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


// Slow-mode elements backing store; only valid with dictionary elements.
NumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return NumberDictionary::cast(elements());
}
3737
3738
// A cleared not-computed bit means the hash field holds a valid hash.
bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
3756
3757
// Incremental string hasher.  Short enough strings may also double as an
// array index; that possibility is tracked while characters are fed in.
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }


// Strings longer than kMaxHashCalcLength get a trivial (length-based) hash.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
3770
3771
void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        // A leading zero disqualifies multi-digit strings ("01" is not an
        // array index).
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      // Overflow guard for array_index_ * 10 + d: 429496729 is 2^32 / 10,
      // and (d + 2) >> 3 adjusts the bound by one for digits >= 6 --
      // presumably keeping the result within the maximal array index;
      // confirm against String::kMaxArrayIndexSize.
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}
3799
3800
// Cheaper variant used once the array-index interpretation is ruled out:
// same Jenkins hash update, no digit tracking.
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
3807
3808
uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  // (These are the finalization steps of the Jenkins one-at-a-time hash.)
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    result = 27;
  }
  return result;
}
3821
3822
3823template <typename schar>
3824uint32_t HashSequentialString(const schar* chars, int length) {
3825  StringHasher hasher(length);
3826  if (!hasher.has_trivial_hash()) {
3827    int i;
3828    for (i = 0; hasher.is_array_index() && (i < length); i++) {
3829      hasher.AddCharacter(chars[i]);
3830    }
3831    for (; i < length; i++) {
3832      hasher.AddCharacterNoIndex(chars[i]);
3833    }
3834  }
3835  return hasher.GetHashField();
3836}
3837
3838
// Fast rejection via the cached hash field; otherwise fall back to the
// slow parse, which also fills in *index on success.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
3846
3847
// The prototype is stored on the map.  The JSObject::cast of `this` is a
// no-op at runtime (debug builds get an extra type assertion).
Object* JSObject::GetPrototype() {
  return JSObject::cast(this)->map()->prototype();
}


PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}
3856
// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
// Unwraps a global proxy to its global object; returns undefined for a
// detached proxy (null prototype), and `this` for any other object.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}
3868
3869
// Hidden properties are stored under the heap's hidden symbol.
bool JSObject::HasHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  return GetPropertyAttributePostInterceptor(this,
                                             GetHeap()->hidden_symbol(),
                                             false) != ABSENT;
}


Object* JSObject::GetHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  PropertyAttributes attributes;
  // You can't install a getter on a property indexed by the hidden symbol,
  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
  // object.
  Object* result =
      GetLocalPropertyPostInterceptor(this,
                                      GetHeap()->hidden_symbol(),
                                      &attributes)->ToObjectUnchecked();
  return result;
}
3890
3891
// Installs the hidden-properties object as a non-enumerable property under
// the hidden symbol.  May fail (allocation), hence MaybeObject.
MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
  ASSERT(!IsJSGlobalProxy());
  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
                                    hidden_obj,
                                    DONT_ENUM,
                                    kNonStrictMode);
}


bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}
3904
3905
// AccessorInfo boolean flags, packed into the smi-valued flag() field.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}
3934
3935
// The attribute bits share the flag() smi with the boolean flags above;
// AttributesField handles the encode/decode.
PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  ASSERT(AttributesField::is_valid(attributes));
  // Preserve the non-attribute bits while replacing the attribute field.
  int rest_value = flag()->value() & ~AttributesField::mask();
  set_flag(Smi::FromInt(rest_value | AttributesField::encode(attributes)));
}
3946
3947
// Convenience overload: stores an entry with empty property details.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


// Writes the (key, value, details) triple for the given entry.  The three
// fields occupy consecutive slots starting at the entry's base index.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  // String keys must carry an enumeration index unless deleted.
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  // One barrier-mode decision covers both tagged stores; details is a smi
  // and can use the barrier-free fast_set.
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::fast_set(this, index+2, details.AsSmi());
}
3969
3970
// Hash-table shape for dictionaries keyed by uint32 element indices.
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}


// Hash of an already-stored key object; must agree with Hash() above.
uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
}


// Boxes the key as a heap number/smi; may fail on allocation.
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}
3991
3992
3993bool StringDictionaryShape::IsMatch(String* key, Object* other) {
3994  // We know that all entries in a hash table had their hash keys created.
3995  // Use that knowledge to have fast failure.
3996  if (key->Hash() != String::cast(other)->Hash()) return false;
3997  return key->Equals(String::cast(other));
3998}
3999
4000
// String keys hash with the string's own (cached) hash code.
uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}
4004
4005
4006uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4007  return String::cast(other)->Hash();
4008}
4009
4010
// A string key is already a heap object; no conversion or allocation is
// needed, so this cannot fail.
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
4014
4015
// Resets this map's code cache to the canonical empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  // NOTE(review): the raw_unchecked_ accessor presumably avoids type
  // checks that can misfire while the collector is running — confirm.
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
4023
4024
4025void JSArray::EnsureSize(int required_size) {
4026  ASSERT(HasFastElements());
4027  FixedArray* elts = FixedArray::cast(elements());
4028  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4029  if (elts->length() < required_size) {
4030    // Doubling in size would be overkill, but leave some slack to avoid
4031    // constantly growing.
4032    Expand(required_size + (required_size >> 3));
4033    // It's a performance benefit to keep a frequently used array in new-space.
4034  } else if (!GetHeap()->new_space()->Contains(elts) &&
4035             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4036    // Expand will allocate a new backing store in new space even if the size
4037    // we asked for isn't larger than what we had before.
4038    Expand(required_size);
4039  }
4040}
4041
4042
4043void JSArray::set_length(Smi* length) {
4044  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4045}
4046
4047
// Installs |storage| as this array's backing store and sets the length
// to the storage's full capacity.
void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}
4052
4053
4054MaybeObject* FixedArray::Copy() {
4055  if (length() == 0) return this;
4056  return GetHeap()->CopyFixedArray(this);
4057}
4058
4059
// Links this object at the head of the isolate's chain of relocatable
// objects (a LIFO list threaded through prev_), presumably so the GC
// can find and update them — confirm against Relocatable's class docs.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
4066
4067
// Unlinks this object from the isolate's relocatable chain.  Lifetimes
// must be strictly LIFO (stack-like), which the ASSERT_EQ enforces.
Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
4073
4074
// The size of a JSObject is recorded in its map, independent of the
// object instance itself.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
4078
4079
4080void Proxy::ProxyIterateBody(ObjectVisitor* v) {
4081  v->VisitExternalReference(
4082      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
4083}
4084
4085
4086template<typename StaticVisitor>
4087void Proxy::ProxyIterateBody() {
4088  StaticVisitor::VisitExternalReference(
4089      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
4090}
4091
4092
4093void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4094  typedef v8::String::ExternalAsciiStringResource Resource;
4095  v->VisitExternalAsciiString(
4096      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4097}
4098
4099
4100template<typename StaticVisitor>
4101void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4102  typedef v8::String::ExternalAsciiStringResource Resource;
4103  StaticVisitor::VisitExternalAsciiString(
4104      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4105}
4106
4107
4108void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4109  typedef v8::String::ExternalStringResource Resource;
4110  v->VisitExternalTwoByteString(
4111      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4112}
4113
4114
4115template<typename StaticVisitor>
4116void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4117  typedef v8::String::ExternalStringResource Resource;
4118  StaticVisitor::VisitExternalTwoByteString(
4119      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4120}
4121
4122#define SLOT_ADDR(obj, offset) \
4123  reinterpret_cast<Object**>((obj)->address() + offset)
4124
4125template<int start_offset, int end_offset, int size>
4126void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
4127    HeapObject* obj,
4128    ObjectVisitor* v) {
4129    v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
4130}
4131
4132
4133template<int start_offset>
4134void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
4135                                                       int object_size,
4136                                                       ObjectVisitor* v) {
4137  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
4138}
4139
4140#undef SLOT_ADDR
4141
4142
4143#undef CAST_ACCESSOR
4144#undef INT_ACCESSORS
4145#undef SMI_ACCESSORS
4146#undef ACCESSORS
4147#undef FIELD_ADDR
4148#undef READ_FIELD
4149#undef WRITE_FIELD
4150#undef WRITE_BARRIER
4151#undef CONDITIONAL_WRITE_BARRIER
4152#undef READ_MEMADDR_FIELD
4153#undef WRITE_MEMADDR_FIELD
4154#undef READ_DOUBLE_FIELD
4155#undef WRITE_DOUBLE_FIELD
4156#undef READ_INT_FIELD
4157#undef WRITE_INT_FIELD
4158#undef READ_SHORT_FIELD
4159#undef WRITE_SHORT_FIELD
4160#undef READ_BYTE_FIELD
4161#undef WRITE_BYTE_FIELD
4162
4163
4164} }  // namespace v8::internal
4165
4166#endif  // V8_OBJECTS_INL_H_
4167