objects-inl.h revision 3fb3ca8c7ca439d408449a395897395c0faae8d1
1// Copyright 2011 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27//
28// Review notes:
29//
30// - The use of macros in these inline functions may seem superfluous
31// but it is absolutely needed to make sure gcc generates optimal
32// code. gcc is not happy when attempting to inline too deep.
33//
34
35#ifndef V8_OBJECTS_INL_H_
36#define V8_OBJECTS_INL_H_
37
38#include "objects.h"
39#include "contexts.h"
40#include "conversions-inl.h"
41#include "heap.h"
42#include "isolate.h"
43#include "property.h"
44#include "spaces.h"
45#include "v8memory.h"
46
47namespace v8 {
48namespace internal {
49
50PropertyDetails::PropertyDetails(Smi* smi) {
51  value_ = smi->value();
52}
53
54
55Smi* PropertyDetails::AsSmi() {
56  return Smi::FromInt(value_);
57}
58
59
60PropertyDetails PropertyDetails::AsDeleted() {
61  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
62  return PropertyDetails(smi);
63}
64
65
// Defines type::cast(Object*): a checked (in debug builds, via ASSERT)
// reinterpret-cast from Object* to the requested type.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


// Defines a raw-int getter/setter pair for a field at a fixed byte offset.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Defines a tagged-pointer getter/setter pair; the setter issues a write
// barrier according to the given WriteBarrierMode.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);           \
  }


// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
#define ACCESSORS_GCSAFE(holder, name, type, offset)                    \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode);                \
  }


// Defines an int getter/setter pair stored as a Smi; no write barrier is
// needed because Smis are not heap pointers.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


// Defines a read-only boolean bit accessor on a bitfield member.
// NOTE(review): the last line ends with a stray continuation backslash, so
// the following blank line is spliced into the macro. Harmless today, but
// confirm and consider dropping the trailing backslash.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


// Defines a boolean bit getter/setter pair on a bitfield member.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
118
119
120bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
121  // There is a constraint on the object; check.
122  if (!this->IsJSObject()) return false;
123  // Fetch the constructor function of the object.
124  Object* cons_obj = JSObject::cast(this)->map()->constructor();
125  if (!cons_obj->IsJSFunction()) return false;
126  JSFunction* fun = JSFunction::cast(cons_obj);
127  // Iterate through the chain of inheriting function templates to
128  // see if the required one occurs.
129  for (Object* type = fun->shared()->function_data();
130       type->IsFunctionTemplateInfo();
131       type = FunctionTemplateInfo::cast(type)->parent_template()) {
132    if (type == expected) return true;
133  }
134  // Didn't find the required type in the inheritance chain.
135  return false;
136}
137
138
139bool Object::IsSmi() {
140  return HAS_SMI_TAG(this);
141}
142
143
144bool Object::IsHeapObject() {
145  return Internals::HasHeapObjectTag(this);
146}
147
148
149bool Object::IsHeapNumber() {
150  return Object::IsHeapObject()
151    && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
152}
153
154
155bool Object::IsString() {
156  return Object::IsHeapObject()
157    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
158}
159
160
161bool Object::IsSymbol() {
162  if (!this->IsHeapObject()) return false;
163  uint32_t type = HeapObject::cast(this)->map()->instance_type();
164  // Because the symbol tag is non-zero and no non-string types have the
165  // symbol bit set we can test for symbols with a very simple test
166  // operation.
167  ASSERT(kSymbolTag != 0);
168  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
169  return (type & kIsSymbolMask) != 0;
170}
171
172
173bool Object::IsConsString() {
174  if (!this->IsHeapObject()) return false;
175  uint32_t type = HeapObject::cast(this)->map()->instance_type();
176  return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
177         (kStringTag | kConsStringTag);
178}
179
180
181bool Object::IsSeqString() {
182  if (!IsString()) return false;
183  return StringShape(String::cast(this)).IsSequential();
184}
185
186
187bool Object::IsSeqAsciiString() {
188  if (!IsString()) return false;
189  return StringShape(String::cast(this)).IsSequential() &&
190         String::cast(this)->IsAsciiRepresentation();
191}
192
193
194bool Object::IsSeqTwoByteString() {
195  if (!IsString()) return false;
196  return StringShape(String::cast(this)).IsSequential() &&
197         String::cast(this)->IsTwoByteRepresentation();
198}
199
200
201bool Object::IsExternalString() {
202  if (!IsString()) return false;
203  return StringShape(String::cast(this)).IsExternal();
204}
205
206
207bool Object::IsExternalAsciiString() {
208  if (!IsString()) return false;
209  return StringShape(String::cast(this)).IsExternal() &&
210         String::cast(this)->IsAsciiRepresentation();
211}
212
213
214bool Object::IsExternalTwoByteString() {
215  if (!IsString()) return false;
216  return StringShape(String::cast(this)).IsExternal() &&
217         String::cast(this)->IsTwoByteRepresentation();
218}
219
220bool Object::HasValidElements() {
221  // Dictionary is covered under FixedArray.
222  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
223}
224
// Snapshot of a string's instance type; the asserts check the value really
// describes a string. set_valid() only has an effect in debug builds.
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Same snapshot, taken directly from a Map.
StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Same snapshot, taken from a raw InstanceType value.
StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// True if the recorded type has the symbol bit set (see Object::IsSymbol
// for why a single mask test suffices).
bool StringShape::IsSymbol() {
  ASSERT(valid());
  ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}
251
252
253bool String::IsAsciiRepresentation() {
254  uint32_t type = map()->instance_type();
255  return (type & kStringEncodingMask) == kAsciiStringTag;
256}
257
258
259bool String::IsTwoByteRepresentation() {
260  uint32_t type = map()->instance_type();
261  return (type & kStringEncodingMask) == kTwoByteStringTag;
262}
263
264
265bool String::HasOnlyAsciiChars() {
266  uint32_t type = map()->instance_type();
267  return (type & kStringEncodingMask) == kAsciiStringTag ||
268         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
269}
270
271
272bool StringShape::IsCons() {
273  return (type_ & kStringRepresentationMask) == kConsStringTag;
274}
275
276
277bool StringShape::IsExternal() {
278  return (type_ & kStringRepresentationMask) == kExternalStringTag;
279}
280
281
282bool StringShape::IsSequential() {
283  return (type_ & kStringRepresentationMask) == kSeqStringTag;
284}
285
286
287StringRepresentationTag StringShape::representation_tag() {
288  uint32_t tag = (type_ & kStringRepresentationMask);
289  return static_cast<StringRepresentationTag>(tag);
290}
291
292
293uint32_t StringShape::full_representation_tag() {
294  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
295}
296
297
298STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
299             Internals::kFullStringRepresentationMask);
300
301
302bool StringShape::IsSequentialAscii() {
303  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
304}
305
306
307bool StringShape::IsSequentialTwoByte() {
308  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
309}
310
311
312bool StringShape::IsExternalAscii() {
313  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
314}
315
316
317bool StringShape::IsExternalTwoByte() {
318  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
319}
320
321
322STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
323             Internals::kExternalTwoByteRepresentationTag);
324
325
// Returns the character at |index| from the flattened string data,
// reading single bytes for ASCII content and uc16 code units otherwise.
uc32 FlatStringReader::Get(int index) {
  // NOTE(review): the upper bound admits index == length_, which would read
  // one past the last character — looks like it should be `index < length_`.
  // Confirm against callers before tightening the assert.
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
334
335
336bool Object::IsNumber() {
337  return IsSmi() || IsHeapNumber();
338}
339
340
341bool Object::IsByteArray() {
342  return Object::IsHeapObject()
343    && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
344}
345
346
347bool Object::IsExternalPixelArray() {
348  return Object::IsHeapObject() &&
349      HeapObject::cast(this)->map()->instance_type() ==
350          EXTERNAL_PIXEL_ARRAY_TYPE;
351}
352
353
354bool Object::IsExternalArray() {
355  if (!Object::IsHeapObject())
356    return false;
357  InstanceType instance_type =
358      HeapObject::cast(this)->map()->instance_type();
359  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
360          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
361}
362
363
364bool Object::IsExternalByteArray() {
365  return Object::IsHeapObject() &&
366      HeapObject::cast(this)->map()->instance_type() ==
367      EXTERNAL_BYTE_ARRAY_TYPE;
368}
369
370
371bool Object::IsExternalUnsignedByteArray() {
372  return Object::IsHeapObject() &&
373      HeapObject::cast(this)->map()->instance_type() ==
374      EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
375}
376
377
378bool Object::IsExternalShortArray() {
379  return Object::IsHeapObject() &&
380      HeapObject::cast(this)->map()->instance_type() ==
381      EXTERNAL_SHORT_ARRAY_TYPE;
382}
383
384
385bool Object::IsExternalUnsignedShortArray() {
386  return Object::IsHeapObject() &&
387      HeapObject::cast(this)->map()->instance_type() ==
388      EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
389}
390
391
392bool Object::IsExternalIntArray() {
393  return Object::IsHeapObject() &&
394      HeapObject::cast(this)->map()->instance_type() ==
395      EXTERNAL_INT_ARRAY_TYPE;
396}
397
398
399bool Object::IsExternalUnsignedIntArray() {
400  return Object::IsHeapObject() &&
401      HeapObject::cast(this)->map()->instance_type() ==
402      EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
403}
404
405
406bool Object::IsExternalFloatArray() {
407  return Object::IsHeapObject() &&
408      HeapObject::cast(this)->map()->instance_type() ==
409      EXTERNAL_FLOAT_ARRAY_TYPE;
410}
411
412
413bool Object::IsExternalDoubleArray() {
414  return Object::IsHeapObject() &&
415      HeapObject::cast(this)->map()->instance_type() ==
416      EXTERNAL_DOUBLE_ARRAY_TYPE;
417}
418
419
420bool MaybeObject::IsFailure() {
421  return HAS_FAILURE_TAG(this);
422}
423
424
425bool MaybeObject::IsRetryAfterGC() {
426  return HAS_FAILURE_TAG(this)
427    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
428}
429
430
431bool MaybeObject::IsOutOfMemory() {
432  return HAS_FAILURE_TAG(this)
433      && Failure::cast(this)->IsOutOfMemoryException();
434}
435
436
437bool MaybeObject::IsException() {
438  return this == Failure::Exception();
439}
440
441
442bool MaybeObject::IsTheHole() {
443  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
444}
445
446
447Failure* Failure::cast(MaybeObject* obj) {
448  ASSERT(HAS_FAILURE_TAG(obj));
449  return reinterpret_cast<Failure*>(obj);
450}
451
452
453bool Object::IsJSReceiver() {
454  return IsHeapObject() &&
455      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
456}
457
458
459bool Object::IsJSObject() {
460  return IsJSReceiver() && !IsJSProxy();
461}
462
463
464bool Object::IsJSProxy() {
465  return Object::IsHeapObject() &&
466     (HeapObject::cast(this)->map()->instance_type() == JS_PROXY_TYPE ||
467      HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE);
468}
469
470
471bool Object::IsJSFunctionProxy() {
472  return Object::IsHeapObject() &&
473      HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE;
474}
475
476
477bool Object::IsJSContextExtensionObject() {
478  return IsHeapObject()
479      && (HeapObject::cast(this)->map()->instance_type() ==
480          JS_CONTEXT_EXTENSION_OBJECT_TYPE);
481}
482
483
484bool Object::IsMap() {
485  return Object::IsHeapObject()
486      && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
487}
488
489
490bool Object::IsFixedArray() {
491  return Object::IsHeapObject()
492      && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
493}
494
495
496bool Object::IsFixedDoubleArray() {
497  return Object::IsHeapObject()
498      && HeapObject::cast(this)->map()->instance_type() ==
499          FIXED_DOUBLE_ARRAY_TYPE;
500}
501
502
503bool Object::IsDescriptorArray() {
504  return IsFixedArray();
505}
506
507
508bool Object::IsDeoptimizationInputData() {
509  // Must be a fixed array.
510  if (!IsFixedArray()) return false;
511
512  // There's no sure way to detect the difference between a fixed array and
513  // a deoptimization data array.  Since this is used for asserts we can
514  // check that the length is zero or else the fixed size plus a multiple of
515  // the entry size.
516  int length = FixedArray::cast(this)->length();
517  if (length == 0) return true;
518
519  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
520  return length >= 0 &&
521      length % DeoptimizationInputData::kDeoptEntrySize == 0;
522}
523
524
525bool Object::IsDeoptimizationOutputData() {
526  if (!IsFixedArray()) return false;
527  // There's actually no way to see the difference between a fixed array and
528  // a deoptimization data array.  Since this is used for asserts we can check
529  // that the length is plausible though.
530  if (FixedArray::cast(this)->length() % 2 != 0) return false;
531  return true;
532}
533
534
535bool Object::IsContext() {
536  if (Object::IsHeapObject()) {
537    Map* map = HeapObject::cast(this)->map();
538    Heap* heap = map->GetHeap();
539    return (map == heap->function_context_map() ||
540            map == heap->catch_context_map() ||
541            map == heap->with_context_map() ||
542            map == heap->global_context_map());
543  }
544  return false;
545}
546
547
548bool Object::IsGlobalContext() {
549  return Object::IsHeapObject() &&
550      HeapObject::cast(this)->map() ==
551      HeapObject::cast(this)->GetHeap()->global_context_map();
552}
553
554
555bool Object::IsJSFunction() {
556  return Object::IsHeapObject()
557      && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
558}
559
560
561template <> inline bool Is<JSFunction>(Object* obj) {
562  return obj->IsJSFunction();
563}
564
565
566bool Object::IsCode() {
567  return Object::IsHeapObject()
568      && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
569}
570
571
572bool Object::IsOddball() {
573  ASSERT(HEAP->is_safe_to_read_maps());
574  return Object::IsHeapObject()
575    && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
576}
577
578
579bool Object::IsJSGlobalPropertyCell() {
580  return Object::IsHeapObject()
581      && HeapObject::cast(this)->map()->instance_type()
582      == JS_GLOBAL_PROPERTY_CELL_TYPE;
583}
584
585
586bool Object::IsSharedFunctionInfo() {
587  return Object::IsHeapObject() &&
588      (HeapObject::cast(this)->map()->instance_type() ==
589       SHARED_FUNCTION_INFO_TYPE);
590}
591
592
593bool Object::IsJSValue() {
594  return Object::IsHeapObject()
595      && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
596}
597
598
599bool Object::IsJSMessageObject() {
600  return Object::IsHeapObject()
601      && (HeapObject::cast(this)->map()->instance_type() ==
602          JS_MESSAGE_OBJECT_TYPE);
603}
604
605
606bool Object::IsStringWrapper() {
607  return IsJSValue() && JSValue::cast(this)->value()->IsString();
608}
609
610
611bool Object::IsForeign() {
612  return Object::IsHeapObject()
613      && HeapObject::cast(this)->map()->instance_type() == FOREIGN_TYPE;
614}
615
616
617bool Object::IsBoolean() {
618  return IsOddball() &&
619      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
620}
621
622
623bool Object::IsJSArray() {
624  return Object::IsHeapObject()
625      && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
626}
627
628
629bool Object::IsJSRegExp() {
630  return Object::IsHeapObject()
631      && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
632}
633
634
635template <> inline bool Is<JSArray>(Object* obj) {
636  return obj->IsJSArray();
637}
638
639
640bool Object::IsHashTable() {
641  return Object::IsHeapObject() &&
642      HeapObject::cast(this)->map() ==
643      HeapObject::cast(this)->GetHeap()->hash_table_map();
644}
645
646
647bool Object::IsDictionary() {
648  return IsHashTable() &&
649      this != HeapObject::cast(this)->GetHeap()->symbol_table();
650}
651
652
653bool Object::IsSymbolTable() {
654  return IsHashTable() && this ==
655         HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
656}
657
658
659bool Object::IsJSFunctionResultCache() {
660  if (!IsFixedArray()) return false;
661  FixedArray* self = FixedArray::cast(this);
662  int length = self->length();
663  if (length < JSFunctionResultCache::kEntriesIndex) return false;
664  if ((length - JSFunctionResultCache::kEntriesIndex)
665      % JSFunctionResultCache::kEntrySize != 0) {
666    return false;
667  }
668#ifdef DEBUG
669  reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
670#endif
671  return true;
672}
673
674
675bool Object::IsNormalizedMapCache() {
676  if (!IsFixedArray()) return false;
677  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
678    return false;
679  }
680#ifdef DEBUG
681  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
682#endif
683  return true;
684}
685
686
687bool Object::IsCompilationCacheTable() {
688  return IsHashTable();
689}
690
691
692bool Object::IsCodeCacheHashTable() {
693  return IsHashTable();
694}
695
696
697bool Object::IsPolymorphicCodeCacheHashTable() {
698  return IsHashTable();
699}
700
701
702bool Object::IsMapCache() {
703  return IsHashTable();
704}
705
706
707bool Object::IsPrimitive() {
708  return IsOddball() || IsNumber() || IsString();
709}
710
711
712bool Object::IsJSGlobalProxy() {
713  bool result = IsHeapObject() &&
714                (HeapObject::cast(this)->map()->instance_type() ==
715                 JS_GLOBAL_PROXY_TYPE);
716  ASSERT(!result || IsAccessCheckNeeded());
717  return result;
718}
719
720
721bool Object::IsGlobalObject() {
722  if (!IsHeapObject()) return false;
723
724  InstanceType type = HeapObject::cast(this)->map()->instance_type();
725  return type == JS_GLOBAL_OBJECT_TYPE ||
726         type == JS_BUILTINS_OBJECT_TYPE;
727}
728
729
730bool Object::IsJSGlobalObject() {
731  return IsHeapObject() &&
732      (HeapObject::cast(this)->map()->instance_type() ==
733       JS_GLOBAL_OBJECT_TYPE);
734}
735
736
737bool Object::IsJSBuiltinsObject() {
738  return IsHeapObject() &&
739      (HeapObject::cast(this)->map()->instance_type() ==
740       JS_BUILTINS_OBJECT_TYPE);
741}
742
743
744bool Object::IsUndetectableObject() {
745  return IsHeapObject()
746    && HeapObject::cast(this)->map()->is_undetectable();
747}
748
749
750bool Object::IsAccessCheckNeeded() {
751  return IsHeapObject()
752    && HeapObject::cast(this)->map()->is_access_check_needed();
753}
754
755
// True if the instance type is any of the struct types enumerated in
// STRUCT_LIST (AccessorInfo, Script, DebugInfo, ...).
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Expands to one Object::Is##Name() predicate per STRUCT_LIST entry, each
// checking the corresponding NAME##_TYPE instance type.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
774
775
776bool Object::IsUndefined() {
777  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
778}
779
780
781bool Object::IsNull() {
782  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
783}
784
785
786bool Object::IsTheHole() {
787  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
788}
789
790
791bool Object::IsTrue() {
792  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
793}
794
795
796bool Object::IsFalse() {
797  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
798}
799
800
801bool Object::IsArgumentsMarker() {
802  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
803}
804
805
806double Object::Number() {
807  ASSERT(IsNumber());
808  return IsSmi()
809    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
810    : reinterpret_cast<HeapNumber*>(this)->value();
811}
812
813
814MaybeObject* Object::ToSmi() {
815  if (IsSmi()) return this;
816  if (IsHeapNumber()) {
817    double value = HeapNumber::cast(this)->value();
818    int int_value = FastD2I(value);
819    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
820      return Smi::FromInt(int_value);
821    }
822  }
823  return Failure::Exception();
824}
825
826
827bool Object::HasSpecificClassOf(String* name) {
828  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
829}
830
831
832MaybeObject* Object::GetElement(uint32_t index) {
833  // GetElement can trigger a getter which can cause allocation.
834  // This was not always the case. This ASSERT is here to catch
835  // leftover incorrect uses.
836  ASSERT(HEAP->IsAllocationAllowed());
837  return GetElementWithReceiver(this, index);
838}
839
840
841Object* Object::GetElementNoExceptionThrown(uint32_t index) {
842  MaybeObject* maybe = GetElementWithReceiver(this, index);
843  ASSERT(!maybe->IsFailure());
844  Object* result = NULL;  // Initialization to please compiler.
845  maybe->ToObject(&result);
846  return result;
847}
848
849
850MaybeObject* Object::GetProperty(String* key) {
851  PropertyAttributes attributes;
852  return GetPropertyWithReceiver(this, key, &attributes);
853}
854
855
856MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
857  return GetPropertyWithReceiver(this, key, attributes);
858}
859
860
// Address of the field at |offset| inside tagged heap object |p|
// (kHeapObjectTag is subtracted to undo the pointer tag).
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Read a tagged pointer field.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

// Write a tagged pointer field (no write barrier; see the barrier macros).
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// TODO(isolates): Pass heap in to these macros.
// NOTE(review): the body already ends in a semicolon, so call sites that add
// their own ';' produce an empty statement; consider a do { } while (false)
// wrapper — confirm no call site relies on the current expansion.
#define WRITE_BARRIER(object, offset) \
  object->GetHeap()->RecordWrite(object->address(), offset);

// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.
// NOTE(review): unbraced if/else — an `else` after the macro at a call site
// would bind to this `if` (dangling else). Confirm and consider the usual
// do { } while (false) hygiene wrapper.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
  if (mode == UPDATE_WRITE_BARRIER) { \
    heap->RecordWrite(object->address(), offset); \
  } else { \
    ASSERT(mode == SKIP_WRITE_BARRIER); \
    ASSERT(heap->InNewSpace(object) || \
           !heap->InNewSpace(READ_FIELD(object, offset)) || \
           Page::FromAddress(object->address())->           \
               IsRegionDirty(object->address() + offset));  \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS


#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


// Raw (untagged) field accessors for the remaining primitive widths.
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
956
957
958Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
959  return &READ_FIELD(obj, byte_offset);
960}
961
962
963int Smi::value() {
964  return Internals::SmiValue(this);
965}
966
967
968Smi* Smi::FromInt(int value) {
969  ASSERT(Smi::IsValid(value));
970  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
971  intptr_t tagged_value =
972      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
973  return reinterpret_cast<Smi*>(tagged_value);
974}
975
976
977Smi* Smi::FromIntptr(intptr_t value) {
978  ASSERT(Smi::IsValid(value));
979  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
980  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
981}
982
983
984Failure::Type Failure::type() const {
985  return static_cast<Type>(value() & kFailureTypeTagMask);
986}
987
988
989bool Failure::IsInternalError() const {
990  return type() == INTERNAL_ERROR;
991}
992
993
994bool Failure::IsOutOfMemoryException() const {
995  return type() == OUT_OF_MEMORY_EXCEPTION;
996}
997
998
999AllocationSpace Failure::allocation_space() const {
1000  ASSERT_EQ(RETRY_AFTER_GC, type());
1001  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1002                                      & kSpaceTagMask);
1003}
1004
1005
1006Failure* Failure::InternalError() {
1007  return Construct(INTERNAL_ERROR);
1008}
1009
1010
1011Failure* Failure::Exception() {
1012  return Construct(EXCEPTION);
1013}
1014
1015
1016Failure* Failure::OutOfMemoryException() {
1017  return Construct(OUT_OF_MEMORY_EXCEPTION);
1018}
1019
1020
1021intptr_t Failure::value() const {
1022  return static_cast<intptr_t>(
1023      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1024}
1025
1026
1027Failure* Failure::RetryAfterGC() {
1028  return RetryAfterGC(NEW_SPACE);
1029}
1030
1031
1032Failure* Failure::RetryAfterGC(AllocationSpace space) {
1033  ASSERT((space & ~kSpaceTagMask) == 0);
1034  return Construct(RETRY_AFTER_GC, space);
1035}
1036
1037
1038Failure* Failure::Construct(Type type, intptr_t value) {
1039  uintptr_t info =
1040      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1041  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1042  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
1043}
1044
1045
// True if |value| fits in a tagged small integer on this architecture.
// The DEBUG-only |in_range| cross-checks the bit trick against the plain
// min/max comparison.
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as an tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
1071
1072
// A MapWord overlays the first word of a heap object.  Depending on GC
// phase it holds a map pointer, a forwarding address, mark/overflow bits,
// or (during compaction) an encoded map address + live offset.
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// A forwarding address is stored untagged, so it looks like a Smi.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}


// NOTE: marking is encoded with a CLEARED bit (0 == marked), so that an
// unmarked object's map pointer (which has the bit set) reads as unmarked.
bool MapWord::IsMarked() {
  return (value_ & kMarkingMask) == 0;
}


void MapWord::SetMark() {
  value_ &= ~kMarkingMask;
}


void MapWord::ClearMark() {
  value_ |= kMarkingMask;
}


// The overflow bit uses the usual polarity: set means overflowed.
bool MapWord::IsOverflowed() {
  return (value_ & kOverflowMask) != 0;
}


void MapWord::SetOverflow() {
  value_ |= kOverflowMask;
}


void MapWord::ClearOverflow() {
  value_ &= ~kOverflowMask;
}


// Compaction encoding: pack (map page index, map page offset, live-byte
// offset of this object) into a single word.
MapWord MapWord::EncodeAddress(Address map_address, int offset) {
  // Offset is the distance in live bytes from the first live object in the
  // same page. The offset between two objects in the same page should not
  // exceed the object area size of a page.
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);

  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));

  Page* map_page = Page::FromAddress(map_address);
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);

  uintptr_t map_page_offset =
      map_page->Offset(map_address) >> kMapAlignmentBits;

  uintptr_t encoding =
      (compact_offset << kForwardingOffsetShift) |
      (map_page_offset << kMapPageOffsetShift) |
      (map_page->mc_page_index << kMapPageIndexShift);
  return MapWord(encoding);
}


// Inverse of EncodeAddress for the map-address component.
Address MapWord::DecodeMapAddress(MapSpace* map_space) {
  int map_page_index =
      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
  ASSERT_MAP_PAGE_INDEX(map_page_index);

  int map_page_offset = static_cast<int>(
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
      kMapAlignmentBits);

  return (map_space->PageAddress(map_page_index) + map_page_offset);
}


int MapWord::DecodeOffset() {
  // The offset field is represented in the kForwardingOffsetBits
  // most-significant bits.
  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
  return static_cast<int>(offset);
}


MapWord MapWord::FromEncodedAddress(Address address) {
  return MapWord(reinterpret_cast<uintptr_t>(address));
}


Address MapWord::ToEncodedAddress() {
  return reinterpret_cast<Address>(value_);
}
1183
1184
#ifdef DEBUG
// Debug-only checks that a field holds a plausible pointer / a Smi.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif
1194
1195
Heap* HeapObject::GetHeap() {
  // During GC, the map pointer in HeapObject is used in various ways that
  // prevent us from retrieving Heap from the map.
  // Assert that we are not in GC, implement GC code in a way that it doesn't
  // pull heap from the map.
  ASSERT(HEAP->is_safe_to_read_maps());
  return map()->heap();
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// The map word is the first word of every heap object; during GC it may
// instead hold a forwarding address or mark bits (see MapWord above).
MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Conversion between untagged addresses and tagged HeapObject pointers.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


// Visit the pointer slots in the half-open byte range [start, end).
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1258
1259
// Mark/overflow state is stored in the object's map word; each mutator
// reads the word, flips the bit, and writes it back (single-threaded GC).
bool HeapObject::IsMarked() {
  return map_word().IsMarked();
}


void HeapObject::SetMark() {
  ASSERT(!IsMarked());
  MapWord first_word = map_word();
  first_word.SetMark();
  set_map_word(first_word);
}


void HeapObject::ClearMark() {
  ASSERT(IsMarked());
  MapWord first_word = map_word();
  first_word.ClearMark();
  set_map_word(first_word);
}


bool HeapObject::IsOverflowed() {
  return map_word().IsOverflowed();
}


// No ASSERT(!IsOverflowed()) here: setting an already-set bit is harmless
// and callers may set it repeatedly while the marking stack is full.
void HeapObject::SetOverflow() {
  MapWord first_word = map_word();
  first_word.SetOverflow();
  set_map_word(first_word);
}


void HeapObject::ClearOverflow() {
  ASSERT(IsOverflowed());
  MapWord first_word = map_word();
  first_word.ClearOverflow();
  set_map_word(first_word);
}
1299
1300
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Unbiased base-2 exponent of the stored IEEE-754 double, read from the
// word that holds the sign and exponent bits.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Non-zero when the stored double is negative (raw sign bit, not 0/1).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1320
1321
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// Elements backing store; may be a FixedArray, FixedDoubleArray,
// dictionary, or external array depending on the elements kind.
HeapObject* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  ASSERT(array->HasValidElements());
  return reinterpret_cast<HeapObject*>(array);
}


void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
  // The backing-store map must agree with the map's fast-elements bit.
  ASSERT(map()->has_fast_elements() ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT(value->HasValidElements());
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
}


// The shared empty_fixed_array lives in old space, so no write barrier.
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}


// Switches the object back to an empty fast-elements backing store.
// May fail (allocation of the fast-elements map), hence MaybeObject.
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1364
1365
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// The oddball kind (undefined, null, true, false, ...) is a raw byte.
byte Oddball::kind() {
  return READ_BYTE_FIELD(this, kKindOffset);
}


void Oddball::set_kind(byte value) {
  WRITE_BYTE_FIELD(this, kKindOffset, value);
}
1378
1379
Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  // A cell must never point at another cell.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1390
1391
1392int JSObject::GetHeaderSize() {
1393  InstanceType type = map()->instance_type();
1394  // Check for the most common kind of JavaScript object before
1395  // falling into the generic switch. This speeds up the internal
1396  // field operations considerably on average.
1397  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1398  switch (type) {
1399    case JS_GLOBAL_PROXY_TYPE:
1400      return JSGlobalProxy::kSize;
1401    case JS_GLOBAL_OBJECT_TYPE:
1402      return JSGlobalObject::kSize;
1403    case JS_BUILTINS_OBJECT_TYPE:
1404      return JSBuiltinsObject::kSize;
1405    case JS_FUNCTION_TYPE:
1406      return JSFunction::kSize;
1407    case JS_VALUE_TYPE:
1408      return JSValue::kSize;
1409    case JS_ARRAY_TYPE:
1410      return JSValue::kSize;
1411    case JS_REGEXP_TYPE:
1412      return JSValue::kSize;
1413    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1414      return JSObject::kHeaderSize;
1415    case JS_MESSAGE_OBJECT_TYPE:
1416      return JSMessageObject::kSize;
1417    default:
1418      UNREACHABLE();
1419      return 0;
1420  }
1421}
1422
1423
// Number of embedder internal fields: everything between the header and
// the in-object properties, measured in pointer-sized slots.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1457
1458
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
// A negative adjusted index addresses an in-object slot (counted back from
// instance_size); a non-negative one indexes the properties FixedArray.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(this, offset);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


// Byte offset of an in-object property; only valid for indices that map
// to in-object slots (hence the ASSERT after adjustment).
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}
1496
1497
// In-object-only variants of the fast property accessors; the index must
// resolve to a slot inside the object proper (ASSERT(index < 0) below).
Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
  return value;
}
1518
1519
1520
// Fills every pointer slot after the header with 'value'.  The value must
// not be in new space so the barrier-free writes are safe.
void JSObject::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1527
1528
// Fast-mode objects keep properties in a FixedArray; slow mode uses a
// StringDictionary.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}
1542
1543
// Fills every field of a Struct with undefined (old-space value, so no
// write barrier is needed).
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1550
1551
// Converts this object to a uint32 array index if it represents one
// exactly: a non-negative Smi, or a HeapNumber whose double value
// round-trips through uint32 unchanged.
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
1569
1570
1571bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1572  if (!this->IsJSValue()) return false;
1573
1574  JSValue* js_value = JSValue::cast(this);
1575  if (!js_value->value()->IsString()) return false;
1576
1577  String* str = String::cast(js_value->value());
1578  if (index >= (uint32_t)str->length()) return false;
1579
1580  return true;
1581}
1582
1583
FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
1594
1595
1596void FixedArray::set(int index, Smi* value) {
1597  ASSERT(map() != HEAP->fixed_cow_array_map());
1598  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1599  int offset = kHeaderSize + index * kPointerSize;
1600  WRITE_FIELD(this, offset, value);
1601}
1602
1603
// General store with an unconditional write barrier.  Copy-on-write
// arrays must never be mutated in place, hence the map check.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1611
1612
// Holes in a FixedDoubleArray are represented by one specific NaN bit
// pattern (kHoleNanInt64); compare bit patterns, since NaN != NaN.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


// A canonical NaN that is guaranteed NOT to collide with the hole NaN,
// used to sanitize incoming NaN values before storing them.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}
1628
1629
// Reads a double element; callers must check is_the_hole() first, so a
// hole NaN must never be observed here.
double FixedDoubleArray::get(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}


// Stores a double element, canonicalizing NaNs so that no stored value
// aliases the hole representation.
void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}
1647
1648
// Marks an element as a hole by writing the hole NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}
1655
1656
1657bool FixedDoubleArray::is_the_hole(int index) {
1658  int offset = kHeaderSize + index * kDoubleSize;
1659  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
1660}
1661
1662
// Copies the elements of a shorter double array and fills the remaining
// tail with holes.
void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
  int old_length = from->length();
  ASSERT(old_length < length());
  OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
              FIELD_ADDR(from, kHeaderSize),
              old_length * kDoubleSize);
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}
1675
1676
// Converts a (shorter) FixedArray of numbers/holes into unboxed doubles,
// then fills the remaining tail with holes.
void FixedDoubleArray::Initialize(FixedArray* from) {
  int old_length = from->length();
  ASSERT(old_length < length());
  for (int i = 0; i < old_length; i++) {
    Object* hole_or_object = from->get(i);
    if (hole_or_object->IsTheHole()) {
      set_the_hole(i);
    } else {
      set(i, hole_or_object->Number());
    }
  }
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}
1694
1695
// Converts a number dictionary into a dense double array: start all-holes,
// then copy each numeric entry into its slot.
// NOTE(review): assumes every numeric key in 'from' is < length() — no
// bounds check here; verify at call sites.
void FixedDoubleArray::Initialize(NumberDictionary* from) {
  int offset = kHeaderSize;
  for (int current = 0; current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
  for (int i = 0; i < from->Capacity(); i++) {
    Object* key = from->KeyAt(i);
    if (key->IsNumber()) {
      uint32_t entry = static_cast<uint32_t>(key->Number());
      set(entry, from->ValueAt(i)->Number());
    }
  }
}
1710
1711
// New-space objects never need a barrier.  Taking an AssertNoAllocation
// witness ensures the object cannot be moved while the mode is cached.
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
1716
1717
// Store with a caller-supplied write-barrier mode (see
// GetWriteBarrierMode above).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
}
1727
1728
// Barrier-free store; only valid when 'value' is not in new space
// (asserted), so no remembered-set update can be required.
void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
1735
1736
void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


// undefined lives in old space, so the barrier-free write is safe.
void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}
1749
1750
void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


// null lives in old space, so the barrier-free write is safe.
void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
1761
1762
// the_hole lives in old space, so the barrier-free write is safe.
// NOTE(review): asserts use the isolate-global HEAP while the write uses
// GetHeap(); presumably always the same heap here — confirm for
// multi-isolate builds.
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
1771
1772
// "Unchecked" stores skip the COW-map and bounds ASSERTs; callers must
// guarantee the array is mutable and the index is in range.
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
}
1788
1789
1790void FixedArray::set_null_unchecked(Heap* heap, int index) {
1791  ASSERT(index >= 0 && index < this->length());
1792  ASSERT(!HEAP->InNewSpace(heap->null_value()));
1793  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1794}
1795
1796
// Address of the first element slot, for bulk/raw access.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
1800
1801
// An empty descriptor array is either the canonical Smi-encoded sentinel
// or the shared empty_descriptor_array (length <= kFirstIndex).
bool DescriptorArray::IsEmpty() {
  ASSERT(this->IsSmi() ||
         this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return this->IsSmi() || length() <= kFirstIndex;
}
1808
1809
// Backing storage for the map's bit_field3, kept in the descriptor array
// as a Smi.
int DescriptorArray::bit_field3_storage() {
  Object* storage = READ_FIELD(this, kBitField3StorageOffset);
  return Smi::cast(storage)->value();
}

void DescriptorArray::set_bit_field3_storage(int value) {
  // The empty sentinel array is shared and must not be written to.
  ASSERT(!IsEmpty());
  WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}
1819
1820
// Swaps two slots using barrier-free writes (fast_set requires the values
// not to be in new space).
void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* tmp = array->get(first);
  fast_set(array, first, array->get(second));
  fast_set(array, second, tmp);
}
1826
1827
// Finds the descriptor index for 'name', or kNotFound.  Keys are sorted,
// so binary search is used except for small symbol-keyed arrays where a
// linear identity scan is cheaper.
int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}
1844
1845
1846int DescriptorArray::SearchWithCache(String* name) {
1847  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1848  if (number == DescriptorLookupCache::kAbsent) {
1849    number = Search(name);
1850    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1851  }
1852  return number;
1853}
1854
1855
// Keys live in this array; values and details live in the content array
// (see ToKeyIndex / ToValueIndex / ToDetailsIndex).
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


// Property details are stored Smi-encoded next to the value.
Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}


JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


// CALLBACKS values wrap a raw AccessorDescriptor* in a Foreign object.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->address());
}
1901
1902
// Real properties have types below FIRST_PHANTOM_PROPERTY_TYPE;
// everything above is a transition or placeholder.
bool DescriptorArray::IsProperty(int descriptor_number) {
  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
}


bool DescriptorArray::IsTransition(int descriptor_number) {
  PropertyType t = GetType(descriptor_number);
  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
      t == EXTERNAL_ARRAY_TRANSITION;
}


bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}
1923
1924
// Copies descriptor 'descriptor_number' out into 'desc'.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             PropertyDetails(GetDetails(descriptor_number)));
}
1930
1931
// Writes key/value/details for slot 'descriptor_number' using barrier-free
// stores, which is why none of the parts may live in new space.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
  ASSERT(!HEAP->InNewSpace(desc->GetValue()));

  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
  FixedArray* content_array = GetContentArray();
  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
  fast_set(content_array, ToDetailsIndex(descriptor_number),
           desc->GetDetails().AsSmi());
}
1946
1947
// Copies one descriptor from 'src' slot 'src_index' into our slot 'index'.
void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc);
}
1953
1954
// Swaps two complete descriptors (key, value, and details triples).
void DescriptorArray::Swap(int first, int second) {
  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
}
1961
1962
// Convenience overload that looks up through the current isolate.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}
1967
1968
// Find entry for key otherwise return kNotFound.
// Open-addressed probe: undefined marks an empty slot (end of chain),
// null marks a deleted slot (keep probing).
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
    if (element != isolate->heap()->null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
1985
1986
// The kMaxNumberKeyIndex slot stores the largest numeric key seen,
// tagged with a low "requires slow elements" bit.
bool NumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

uint32_t NumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

void NumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
2005
2006
2007// ------------------------------------
2008// Cast operations
2009
2010
// Define Name::cast(Object*) for each class below.  The macro-generated
// casts only ASSERT the dynamic type and reinterpret the pointer; no
// conversion happens at runtime.
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqAsciiString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(JSGlobalPropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalByteArray)
CAST_ACCESSOR(ExternalUnsignedByteArray)
CAST_ACCESSOR(ExternalShortArray)
CAST_ACCESSOR(ExternalUnsignedShortArray)
CAST_ACCESSOR(ExternalIntArray)
CAST_ACCESSOR(ExternalUnsignedIntArray)
CAST_ACCESSOR(ExternalFloatArray)
CAST_ACCESSOR(ExternalDoubleArray)
CAST_ACCESSOR(ExternalPixelArray)
CAST_ACCESSOR(Struct)


// Generate cast operations for every struct type in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
2068
2069
// Checked cast for the HashTable template: asserts the object really is
// a hash table, then reinterprets the pointer (no runtime conversion).
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
2075
2076
// Smi-encoded length fields of array-like heap objects.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(ByteArray, length, kLengthOffset)

// External arrays store their length as a raw int rather than a Smi.
// TODO(1493): Investigate if it's possible to s/INT/SMI/ here (and
// subsequently unify H{Fixed,External}ArrayLength).
INT_ACCESSORS(ExternalArray, length, kLengthOffset)


SMI_ACCESSORS(String, length, kLengthOffset)
2086
2087
// Raw 32-bit hash field of the string (hash value plus flag bits).
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // On 64-bit hosts the field occupies a full word; zero the upper half
  // so the whole word has a deterministic value.
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
2099
2100
2101bool String::Equals(String* other) {
2102  if (other == this) return true;
2103  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2104    return false;
2105  }
2106  return SlowEquals(other);
2107}
2108
2109
// Attempts to flatten this string.  Non-cons strings are already flat;
// a cons string with an empty second part is flattened by returning its
// first part.  Otherwise the slow path may allocate and can fail.
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->second()->length() == 0) return cons->first();
  return SlowTryFlatten(pretenure);
}


// Like TryFlatten, but never fails: if flattening could not allocate,
// the original (unflattened) string is returned instead.
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (flat->ToObject(&successfully_flattened)) {
    return String::cast(successfully_flattened);
  }
  return this;
}
2126
2127
// Returns the character at |index|, dispatching on the string's full
// representation tag (sequential/cons/external x ascii/two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    default:
      break;
  }

  // All valid representation tags are handled above.
  UNREACHABLE();
  return 0;
}
2149
2150
2151void String::Set(int index, uint16_t value) {
2152  ASSERT(index >= 0 && index < length());
2153  ASSERT(StringShape(this).IsSequential());
2154
2155  return this->IsAsciiRepresentation()
2156      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2157      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2158}
2159
2160
2161bool String::IsFlat() {
2162  switch (StringShape(this).representation_tag()) {
2163    case kConsStringTag: {
2164      String* second = ConsString::cast(this)->second();
2165      // Only flattened strings have second part empty.
2166      return second->length() == 0;
2167    }
2168    default:
2169      return true;
2170  }
2171}
2172
2173
// Reads one character from the in-object byte payload.
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Writes one character; the value must fit in the ASCII range.
void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (the payload starts right after the
// object header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}
2205
2206
// Reads one 16-bit character from the in-object payload.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size in bytes, derived from the string length.  The
// instance_type parameter is unused here but kept for interface
// uniformity with other Size functions.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
2227
2228
// Checked accessor for the first component of the cons pair.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Unchecked variant used where the slot may hold a forwarding pointer
// or otherwise not be a valid String (e.g. during GC).
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
}
2259
2260
// The resource pointer is an embedder-owned C++ object stored as a raw
// pointer inside the heap object, hence the reinterpret_cast access.
ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalAsciiString::set_resource(
    ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}


ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::set_resource(
    ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
2281
2282
// Resets the cache bookkeeping so it holds no entries: both the finger
// and the size point at the first entry slot.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


// Clears all cached entries by overwriting them with the hole value,
// then resets the size/finger bookkeeping.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


// Number of used slots, kept as a Smi in a fixed header slot.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


// The "finger" is the clock-hand position used by the replacement
// policy; also stored as a Smi in the header.
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
2317
2318
// Bounds-checked byte read from the in-object payload.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Reads an aligned int; |index| counts ints, not bytes.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Recovers the tagged ByteArray pointer from the address of its first
// data byte (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


// Untagged address of the first data byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
2346
2347
// Pixel data lives outside the V8 heap; this returns the backing store
// as a byte pointer.
uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalPixelArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}


void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}
2365
2366
// The backing store pointer is stored as a raw intptr_t field since it
// points outside the V8 heap and must not be visited by the GC.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// No write barrier is needed (the pointee is off-heap), so |mode| is
// intentionally unused; it is kept for accessor-signature uniformity.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
2377
2378
// Element accessors for the external typed arrays.  Each pair simply
// indexes into the off-heap backing store with the element type
// matching the array flavor; all do a bounds ASSERT in debug builds.

int8_t ExternalByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUnsignedByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUnsignedShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}


int32_t ExternalIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUnsignedIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloatArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}


double ExternalDoubleArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


void ExternalDoubleArray::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
2489
2490
// Id of the static visitor used when iterating objects with this map.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  // The id must fit in the single-byte field.
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes; stored divided by the pointer size, so it is
// scaled back up on read.
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


// Number of properties stored directly in the object body.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
2515
2516
// Computes the object's size in bytes.  Fixed-size types encode their
// size directly in the map; variable-sized types are dispatched on the
// instance type, with only the most common cases inlined here.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // We can ignore the "symbol" bit because it is only set for symbols
  // and implies a string type.
  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
  // Only inline the most frequent cases.
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE) {
    return SeqAsciiString::SizeFor(
        reinterpret_cast<SeqAsciiString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  // Code objects are the only remaining variable-sized type.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
2545
2546
// Stores the instance size scaled down by the pointer size so it fits
// in a single byte; the value must be pointer-aligned.
void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}
2567
2568
// The instance type tag identifying what kind of object this map
// describes; stored in a single byte.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


// Count of property slots allocated but not yet used in instances.
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Clamp to the byte field's maximum rather than asserting.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


// First byte of packed boolean flags (see the k* bit constants).
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


// Second byte of packed boolean flags.
byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
2607
2608
2609void Map::set_non_instance_prototype(bool value) {
2610  if (value) {
2611    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2612  } else {
2613    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2614  }
2615}
2616
2617
2618bool Map::has_non_instance_prototype() {
2619  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2620}
2621
2622
2623void Map::set_function_with_prototype(bool value) {
2624  if (value) {
2625    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2626  } else {
2627    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2628  }
2629}
2630
2631
2632bool Map::function_with_prototype() {
2633  return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2634}
2635
2636
// Flags instances of this map as requiring a security access check on
// property access (bit lives in bit_field).
void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}
2649
2650
// Whether new properties may be added to instances (bit in bit_field2).
void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


// Whether this map is attached to its SharedFunctionInfo (bit in
// bit_field2).
void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}
2675
2676
// Whether this map may be shared between objects (bit in bit_field3).
void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field3(bit_field3() | (1 << kIsShared));
  } else {
    set_bit_field3(bit_field3() & ~(1 << kIsShared));
  }
}

bool Map::is_shared() {
  return ((1 << kIsShared) & bit_field3()) != 0;
}
2688
2689
// Unchecked accessors: these skip the type-asserting cast so they are
// safe to call during GC when maps may be in an inconsistent state.
JSFunction* Map::unchecked_constructor() {
  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}


FixedArray* Map::unchecked_prototype_transitions() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kPrototypeTransitionsOffset));
}
2699
2700
// Packed flags word holding kind, IC state, type, argc etc.  See the
// Extract*FromFlags helpers for the individual components.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Code::set_flags(Code::Flags flags) {
  // The kind field must be wide enough for every Kind value.
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= (kFlagsKindMask >> kFlagsKindShift)+1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
2714
2715
// Convenience getters that decode individual components of flags().

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InLoopFlag Code::ic_in_loop() {
  return ExtractICInLoopFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}


Code::ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub());
  return ExtractExtraICStateFromFlags(flags());
}


PropertyType Code::type() {
  return ExtractTypeFromFlags(flags());
}


// Argument count recorded for call stubs; only meaningful for the
// asserted kinds.
int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}
2754
2755
// The following byte/uint32 fields reuse the same header slots for
// different purposes depending on the code kind, hence the kind ASSERTs
// guarding each accessor.

// Major key identifying which code stub this is.
int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == UNARY_OP_IC ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC);
  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == UNARY_OP_IC ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC);
  ASSERT(0 <= major && major < 256);
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}


// Whether this (unoptimized) function code is eligible for optimization.
bool Code::optimizable() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


// Whether deoptimization support was compiled into this function code.
bool Code::has_deoptimization_support() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kHasDeoptimizationSupportOffset) == 1;
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kHasDeoptimizationSupportOffset, value ? 1 : 0);
}


// Maximum loop nesting depth at which on-stack replacement is allowed.
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT(kind() == FUNCTION);
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


// Number of stack slots used by optimized frames for this code.
unsigned Code::stack_slots() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kStackSlotsOffset);
}


void Code::set_stack_slots(unsigned slots) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
}


// Byte offset of the safepoint table within the instruction stream.
unsigned Code::safepoint_table_offset() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
}


void Code::set_safepoint_table_offset(unsigned offset) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}


// Byte offset of the stack check table within the instruction stream.
unsigned Code::stack_check_table_offset() {
  ASSERT(kind() == FUNCTION);
  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}


void Code::set_stack_check_table_offset(unsigned offset) {
  ASSERT(kind() == FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
2848
2849
// Receiver check kind recorded for call stubs.
CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}


// Operand-type specialization recorded for unary-op stubs.
byte Code::unary_op_type() {
  ASSERT(is_unary_op_stub());
  return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
}


void Code::set_unary_op_type(byte value) {
  ASSERT(is_unary_op_stub());
  WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
}


// Operand-type specialization recorded for binary-op stubs.
byte Code::binary_op_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_binary_op_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


// Result-type specialization recorded for binary-op stubs.
byte Code::binary_op_result_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
}


void Code::set_binary_op_result_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
}


// Current state of a compare IC stub.
byte Code::compare_state() {
  ASSERT(is_compare_ic_stub());
  return READ_BYTE_FIELD(this, kCompareStateOffset);
}


void Code::set_compare_state(byte value) {
  ASSERT(is_compare_ic_stub());
  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}
2909
2910
2911bool Code::is_inline_cache_stub() {
2912  Kind kind = this->kind();
2913  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
2914}
2915
2916
// Packs the individual code attributes into a single Flags word.  The
// trailing ASSERTs verify the packing round-trips through the
// Extract*FromFlags decoders.
Code::Flags Code::ComputeFlags(Kind kind,
                               InLoopFlag in_loop,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for call IC stubs or for store IC
  // stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         (kind == CALL_IC) ||
         (kind == STORE_IC) ||
         (kind == KEYED_STORE_IC));
  // Compute the bit mask.
  int bits = kind << kFlagsKindShift;
  if (in_loop) bits |= kFlagsICInLoopMask;
  bits |= ic_state << kFlagsICStateShift;
  bits |= type << kFlagsTypeShift;
  bits |= extra_ic_state << kFlagsExtraICStateShift;
  bits |= argc << kFlagsArgumentsCountShift;
  if (holder == PROTOTYPE_MAP) bits |= kFlagsCacheInPrototypeMapMask;
  // Cast to flags and validate result before returning it.
  Flags result = static_cast<Flags>(bits);
  ASSERT(ExtractKindFromFlags(result) == kind);
  ASSERT(ExtractICStateFromFlags(result) == ic_state);
  ASSERT(ExtractICInLoopFromFlags(result) == in_loop);
  ASSERT(ExtractTypeFromFlags(result) == type);
  ASSERT(ExtractExtraICStateFromFlags(result) == extra_ic_state);
  ASSERT(ExtractArgumentsCountFromFlags(result) == argc);
  return result;
}
2948
2949
// Convenience wrapper around ComputeFlags with the IC state fixed to
// MONOMORPHIC.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          InLoopFlag in_loop,
                                          int argc) {
  return ComputeFlags(
      kind, in_loop, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
2959
2960
2961Code::Kind Code::ExtractKindFromFlags(Flags flags) {
2962  int bits = (flags & kFlagsKindMask) >> kFlagsKindShift;
2963  return static_cast<Kind>(bits);
2964}
2965
2966
2967InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
2968  int bits = (flags & kFlagsICStateMask) >> kFlagsICStateShift;
2969  return static_cast<InlineCacheState>(bits);
2970}
2971
2972
2973Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
2974  int bits = (flags & kFlagsExtraICStateMask) >> kFlagsExtraICStateShift;
2975  return static_cast<ExtraICState>(bits);
2976}
2977
2978
2979InLoopFlag Code::ExtractICInLoopFromFlags(Flags flags) {
2980  int bits = (flags & kFlagsICInLoopMask);
2981  return bits != 0 ? IN_LOOP : NOT_IN_LOOP;
2982}
2983
2984
2985PropertyType Code::ExtractTypeFromFlags(Flags flags) {
2986  int bits = (flags & kFlagsTypeMask) >> kFlagsTypeShift;
2987  return static_cast<PropertyType>(bits);
2988}
2989
2990
2991int Code::ExtractArgumentsCountFromFlags(Flags flags) {
2992  return (flags & kFlagsArgumentsCountMask) >> kFlagsArgumentsCountShift;
2993}
2994
2995
2996InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
2997  int bits = (flags & kFlagsCacheInPrototypeMapMask);
2998  return bits != 0 ? PROTOTYPE_MAP : OWN_MAP;
2999}
3000
3001
3002Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3003  int bits = flags & ~kFlagsTypeMask;
3004  return static_cast<Flags>(bits);
3005}
3006
3007
// Maps the address of the first instruction back to the enclosing Code
// object (the instructions start right after the Code header).
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
3017
3018
// heap()/isolate() helpers: the owning heap is found through the page
// containing the object, so these work without thread-local lookups.

Isolate* Map::isolate() {
  return heap()->isolate();
}


Heap* Map::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Heap* Code::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* Code::isolate() {
  return heap()->isolate();
}


Heap* JSGlobalPropertyCell::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* JSGlobalPropertyCell::isolate() {
  return heap()->isolate();
}
3059
3060
// Reads a code entry address from |location_of_address| and maps it
// back to the enclosing heap object (entry points follow the header).
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
3065
3066
// The prototype object instances of this map delegate to; either null
// or a JSReceiver.
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
}
3077
3078
// Returns a map identical to this one except with fast elements.  If
// this map already qualifies it is returned as-is; otherwise a copy is
// allocated, which may fail.
MaybeObject* Map::GetFastElementsMap() {
  if (has_fast_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_elements_kind(JSObject::FAST_ELEMENTS);
  isolate()->counters()->map_to_fast_elements()->Increment();
  return new_map;
}


// As above, but transitions to fast double (unboxed) elements.
MaybeObject* Map::GetFastDoubleElementsMap() {
  if (has_fast_double_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_elements_kind(JSObject::FAST_DOUBLE_ELEMENTS);
  isolate()->counters()->map_to_fast_double_elements()->Increment();
  return new_map;
}


// As above, but transitions to slow (dictionary) elements.
MaybeObject* Map::GetSlowElementsMap() {
  if (!has_fast_elements() && !has_fast_double_elements()) return this;
  Object* obj;
  { MaybeObject* maybe_obj = CopyDropTransitions();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  Map* new_map = Map::cast(obj);
  new_map->set_elements_kind(JSObject::DICTIONARY_ELEMENTS);
  isolate()->counters()->map_to_slow_elements()->Increment();
  return new_map;
}
3116
3117
3118DescriptorArray* Map::instance_descriptors() {
3119  Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3120  if (object->IsSmi()) {
3121    return HEAP->empty_descriptor_array();
3122  } else {
3123    return DescriptorArray::cast(object);
3124  }
3125}
3126
3127
// Initializes the shared descriptors/bit_field3 slot to the Smi zero:
// no descriptors, all bit_field3 bits clear.
void Map::init_instance_descriptors() {
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
}


// Drops the descriptor array while preserving the bit_field3 value that
// was stored inside it, writing the value back into the slot as a Smi.
void Map::clear_instance_descriptors() {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (!object->IsSmi()) {
    WRITE_FIELD(
        this,
        kInstanceDescriptorsOrBitField3Offset,
        Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
  }
}
3143
3144
// Installs a new descriptor array in the shared descriptors/bit_field3
// slot.  The current bit_field3 value (held either directly as a Smi or
// inside the old descriptor array) is migrated into the new array first.
// Installing the canonical empty descriptor array is implemented as
// clearing the slot back to a Smi instead of storing the array.
void Map::set_instance_descriptors(DescriptorArray* value,
                                   WriteBarrierMode mode) {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (value == isolate()->heap()->empty_descriptor_array()) {
    clear_instance_descriptors();
    return;
  } else {
    if (object->IsSmi()) {
      // Slot currently holds the raw bit_field3 value.
      value->set_bit_field3_storage(Smi::cast(object)->value());
    } else {
      // Slot currently holds another descriptor array; carry over its
      // bit_field3 storage.
      value->set_bit_field3_storage(
          DescriptorArray::cast(object)->bit_field3_storage());
    }
  }
  ASSERT(!is_shared());
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kInstanceDescriptorsOrBitField3Offset,
                            mode);
}
3167
3168
3169int Map::bit_field3() {
3170  Object* object = READ_FIELD(this,
3171                              kInstanceDescriptorsOrBitField3Offset);
3172  if (object->IsSmi()) {
3173    return Smi::cast(object)->value();
3174  } else {
3175    return DescriptorArray::cast(object)->bit_field3_storage();
3176  }
3177}
3178
3179
// Writes bit_field3 into whichever representation is active: directly in
// the slot as a Smi, or in the descriptor array's storage field.
void Map::set_bit_field3(int value) {
  ASSERT(Smi::IsValid(value));
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    WRITE_FIELD(this,
                kInstanceDescriptorsOrBitField3Offset,
                Smi::FromInt(value));
  } else {
    DescriptorArray::cast(object)->set_bit_field3_storage(value);
  }
}
3192
3193
// Field accessors generated by the ACCESSORS family of macros; each line
// expands to a getter and a setter for the named pointer-sized field.
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
// GCSAFE variant: usable while the heap is in an inconsistent state.
ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
                 kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)

// API callback info objects.
ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

// Template objects used by the embedder API.
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
          kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
ACCESSORS(FunctionTemplateInfo, prototype_attributes, Smi,
          kPrototypeAttributesOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

// Script metadata fields.
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS(Script, type, Smi, kTypeOffset)
ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS(Script, eval_from_instructions_offset, Smi,
          kEvalFrominstructionsOffsetOffset)
3278
// Debugger bookkeeping objects; only compiled in when debugger support is
// enabled.
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
3290
// SharedFunctionInfo pointer fields and boolean bits.  BOOL_ACCESSORS /
// BOOL_GETTER pack a single flag bit into the named int field.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
          kThisPropertyAssignmentsOffset)

BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
3328
3329
// On 32-bit hosts the integer fields of SharedFunctionInfo are stored as
// Smis.  On 64-bit hosts two raw 32-bit ints are packed into each
// pointer-sized slot; the low half stores its value shifted left by one
// with the heap-object tag bit clear, so a GC scanning the slot does not
// mistake it for a pointer.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
              kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
#else

// Low (pointer-aligned) half of a packed slot.  The range ASSERT checks
// that the value survives the <<1 encoding, i.e. the top two bits agree
// (all set for negative values, all clear for non-negative ones).
// NOTE(review): the literal 0x000000000 has nine digits; its value is
// still zero, so the check behaves as intended.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x000000000);                  \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

// High (unaligned) half of a packed slot: stored as a plain int, no
// encoding needed because a GC never interprets this half-word alone.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        this_property_assignments_count,
                        kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
#endif
3398
3399
// Reads the one-byte construction counter used for in-object slack
// tracking.
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


// Writes the construction counter; the value must fit in a single byte.
void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}
3409
3410
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)


// Slack tracking is in progress as long as initial_map() has not been
// reset to the undefined value.
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != HEAP->undefined_value();
}


// Getter only: the flag is written through set_optimization_disabled(),
// which also updates the code object.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
3426
3427
3428void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3429  set_compiler_hints(BooleanBit::set(compiler_hints(),
3430                                     kOptimizationDisabled,
3431                                     disable));
3432  // If disabling optimizations we reflect that in the code object so
3433  // it will not be counted as optimizable code.
3434  if ((code()->kind() == Code::FUNCTION) && disable) {
3435    code()->set_optimizable(false);
3436  }
3437}
3438
3439
// Remaining compiler-hint bits and the code-cache accessors.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, strict_mode,
               kStrictModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3453
3454bool Script::HasValidSource() {
3455  Object* src = this->source();
3456  if (!src->IsString()) return true;
3457  String* src_str = String::cast(src);
3458  if (!StringShape(src_str).IsExternal()) return true;
3459  if (src_str->IsAsciiRepresentation()) {
3460    return ExternalAsciiString::cast(src)->resource() != NULL;
3461  } else if (src_str->IsTwoByteRepresentation()) {
3462    return ExternalTwoByteString::cast(src)->resource() != NULL;
3463  }
3464  return true;
3465}
3466
3467
// Marks a builtin as exempt from the arguments-adaptor protocol by
// storing the sentinel parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// The start position shares an int field with type bits; see
// kStartPositionShift / kStartPositionMask.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


// Replaces only the position bits, keeping the type bits intact.
void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask))
}
3483
3484
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Like code() but without the checked cast — presumably for use while the
// heap is in an inconsistent state (e.g. during GC); confirm with callers.
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
}


// NOTE(review): |mode| is unused and no write barrier is emitted; the
// ASSERT documents the assumption that code objects never live in new
// space.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
}
3499
3500
// Scope info is stored as a plain heap pointer and reinterpreted on read.
SerializedScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<SerializedScopeInfo*>(
      READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
}


Smi* SharedFunctionInfo::deopt_counter() {
  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
}


// No write barrier needed: the stored value is always a Smi.
void SharedFunctionInfo::set_deopt_counter(Smi* value) {
  WRITE_FIELD(this, kDeoptCounterOffset, value);
}
3522
3523
// A function is compiled once its code is no longer the lazy-compile
// stub.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}


// function_data holds a FunctionTemplateInfo for API functions.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


// function_data holds a Smi-encoded id for builtin functions.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
3550
3551
3552int SharedFunctionInfo::code_age() {
3553  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3554}
3555
3556
3557void SharedFunctionInfo::set_code_age(int code_age) {
3558  set_compiler_hints(compiler_hints() |
3559                     ((code_age & kCodeAgeMask) << kCodeAgeShift));
3560}
3561
3562
3563bool SharedFunctionInfo::has_deoptimization_support() {
3564  Code* code = this->code();
3565  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3566}
3567
3568
// A function is a builtin iff its context's global object is the
// builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global()->IsJSBuiltinsObject();
}


// The sentinel parameter count opts a function out of argument
// adaptation.
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


// Lazy recompilation is signalled by installing the kLazyRecompile stub.
bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
3593
3594
// The function stores a raw code-entry address rather than a Code
// pointer; these accessors translate between the two representations.
Code* JSFunction::code() {
  return Code::cast(unchecked_code());
}


Code* JSFunction::unchecked_code() {
  return reinterpret_cast<Code*>(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  // Skip the write barrier because code is never in new space.
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
3612
3613
3614void JSFunction::ReplaceCode(Code* code) {
3615  bool was_optimized = IsOptimized();
3616  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3617
3618  set_code(code);
3619
3620  // Add/remove the function from the list of optimized functions for this
3621  // context based on the state change.
3622  if (!was_optimized && is_optimized) {
3623    context()->global_context()->AddOptimizedFunction(this);
3624  }
3625  if (was_optimized && !is_optimized) {
3626    context()->global_context()->RemoveOptimizedFunction(this);
3627  }
3628}
3629
3630
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Unchecked variant: no Context cast check is performed.
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


// Unchecked variant of shared(); skips the type check.
SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


// The context may be undefined while the function is being set up.
void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(this, kContextOffset);
}

// This slot holds either the initial map or, before a map exists, the
// prototype object; see the accessors below.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
3655
3656
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


// The shared slot holds a Map only once the initial map has been created.
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// Before an initial map exists, a non-hole value in the slot is the
// prototype itself.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


// A non-instance prototype (stored in the map's constructor field) also
// counts as having a prototype.
bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}
3680
3681
Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

// Whether this function should expose a "prototype" property at all.
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}
3702
3703
// Compiled once the code is no longer the lazy-compile stub.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


int JSFunction::NumberOfLiterals() {
  return literals()->length();
}
3712
3713
// Per-id slots for the JavaScript builtins: a function object and its
// code, addressed by offset computed from the builtin id.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// No write barrier: the ASSERT documents that code objects are never in
// new space.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}
3740
3741
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, padding, Object, kPaddingOffset)


// A Foreign wraps a raw machine address stored as an intptr field.
Address Foreign::address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
}


void Foreign::set_address(Address value) {
  WRITE_INTPTR_FIELD(this, kAddressOffset, OffsetFrom(value));
}
3754
3755
ACCESSORS(JSValue, value, Object, kValueOffset)


// Checked cast; the size ASSERT also guards against objects with a
// different layout.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
3764
3765
// Fields of the message objects passed to message handlers.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Checked cast with an additional size check on the object layout.
JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
3780
3781
// Code object metadata fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, next_code_flushing_candidate,
          Object, kNextCodeFlushingCandidateOffset)
3787
3788
// Instructions start immediately after the code object header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction size rounded up to the heap's object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Unchecked variants: no cast checks, for use when the heap may be in an
// inconsistent state.
FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


// NOTE(review): the upper bound is inclusive — presumably so a return
// address one past the last instruction still matches; confirm with
// callers before tightening.
bool Code::contains(byte* pc) {
  return (instruction_start() <= pc) &&
      (pc <= instruction_start() + instruction_size());
}
3834
3835
ACCESSORS(JSArray, length, Object, kLengthOffset)

// The data field holds a FixedArray describing the compiled regexp, or
// undefined when not compiled (see JSRegExp::TypeTag below).
ACCESSORS(JSRegExp, data, Object, kDataOffset)
3840
3841
// Reads the regexp type tag from the data array; undefined data means the
// regexp has not been compiled yet.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


// Unchecked variant: assumes the data array exists.
JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
3854
3855
3856int JSRegExp::CaptureCount() {
3857  switch (TypeTag()) {
3858    case ATOM:
3859      return 0;
3860    case IRREGEXP:
3861      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
3862    default:
3863      UNREACHABLE();
3864      return -1;
3865  }
3866}
3867
3868
// Reads the regexp flags stored in the data array.
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
3875
3876
3877String* JSRegExp::Pattern() {
3878  ASSERT(this->data()->IsFixedArray());
3879  Object* data = this->data();
3880  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
3881  return pattern;
3882}
3883
3884
// Indexed access into the regexp data array; the checked variants require
// a compiled regexp.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


// Unchecked read: no compiled-state or cast checks.
Object* JSRegExp::DataAtUnchecked(int index) {
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  int offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(fa, offset);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


// Unchecked write; the write barrier is skipped for heap values.
void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    fa->set_unchecked(index, Smi::cast(value));
  } else {
    fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
  }
}
3914
3915
// Returns the map's elements kind; the ASSERT cross-checks that the
// backing store's type matches the kind claimed by the map.
JSObject::ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
  ASSERT((kind == FAST_ELEMENTS &&
          (elements()->map() == GetHeap()->fixed_array_map() ||
           elements()->map() == GetHeap()->fixed_cow_array_map())) ||
         (kind == FAST_DOUBLE_ELEMENTS &&
          elements()->IsFixedDoubleArray()) ||
         (kind == DICTIONARY_ELEMENTS &&
          elements()->IsFixedArray() &&
          elements()->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  return kind;
}
3929
3930
// Elements-kind predicates, all derived from GetElementsKind().
bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}


bool JSObject::HasFastDoubleElements() {
  return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


// Checked on the backing store itself rather than the map's kind.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
3951
3952
// Generates HasExternal<Name>Elements() predicates that compare the
// backing store's instance type against the given external-array type.
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
3976
3977
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// The elements length may be changed only for (fast or dictionary)
// FixedArray-backed stores, never for external arrays — the ASSERT
// cross-checks the two conditions agree.
bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() ||
      elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
3994
3995
// Returns this object's fast elements as a writable FixedArray, first
// copying them if they are currently backed by the shared copy-on-write
// array map.  May return an allocation failure.
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Fast path: anything not using the COW map is already writable.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    // Propagate an allocation failure to the caller.
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
4012
4013
4014StringDictionary* JSObject::property_dictionary() {
4015  ASSERT(!HasFastProperties());
4016  return StringDictionary::cast(properties());
4017}
4018
4019
4020NumberDictionary* JSObject::element_dictionary() {
4021  ASSERT(HasDictionaryElements());
4022  return NumberDictionary::cast(elements());
4023}
4024
4025
4026bool String::IsHashFieldComputed(uint32_t field) {
4027  return (field & kHashNotComputedMask) == 0;
4028}
4029
4030
4031bool String::HasHashCode() {
4032  return IsHashFieldComputed(hash_field());
4033}
4034
4035
4036uint32_t String::Hash() {
4037  // Fast case: has hash code already been computed?
4038  uint32_t field = hash_field();
4039  if (IsHashFieldComputed(field)) return field >> kHashShift;
4040  // Slow case: compute hash code and set it.
4041  return ComputeAndSetHash();
4042}
4043
4044
// Initializes an incremental hasher for a string of |length| characters.
// Strings of 1..kMaxArrayIndexSize characters start out as array-index
// candidates; AddCharacter() disqualifies them on the first non-digit.
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }
4052
4053
4054bool StringHasher::has_trivial_hash() {
4055  return length_ > String::kMaxHashCalcLength;
4056}
4057
4058
// Feeds one character into the running hash and, while the string is
// still an array-index candidate, into the incremental index value.
void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      // Any non-digit disqualifies the string as an array index.
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        // A leading zero is only a valid index if the string is just "0".
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      // Overflow guard: 429496729 == 2^32 / 10, and (d + 2) >> 3 is 1
      // exactly when d >= 6, so this keeps array_index_ * 10 + d within
      // the uint32 range without a 64-bit comparison.
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}
4086
4087
// Like AddCharacter(), but skips the array-index bookkeeping; only valid
// once the string is already known not to be an array index.
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  // Jenkins one-at-a-time hash update for a single character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
4094
4095
// Finalizes and returns the running hash.
uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    // 27 is an arbitrary non-zero substitute.
    result = 27;
  }
  return result;
}
4108
4109
4110template <typename schar>
4111uint32_t HashSequentialString(const schar* chars, int length) {
4112  StringHasher hasher(length);
4113  if (!hasher.has_trivial_hash()) {
4114    int i;
4115    for (i = 0; hasher.is_array_index() && (i < length); i++) {
4116      hasher.AddCharacter(chars[i]);
4117    }
4118    for (; i < length; i++) {
4119      hasher.AddCharacterNoIndex(chars[i]);
4120    }
4121  }
4122  return hasher.GetHashField();
4123}
4124
4125
4126bool String::AsArrayIndex(uint32_t* index) {
4127  uint32_t field = hash_field();
4128  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
4129    return false;
4130  }
4131  return SlowAsArrayIndex(index);
4132}
4133
4134
4135Object* JSReceiver::GetPrototype() {
4136  return HeapObject::cast(this)->map()->prototype();
4137}
4138
4139
4140bool JSReceiver::HasProperty(String* name) {
4141  if (IsJSProxy()) {
4142    return JSProxy::cast(this)->HasPropertyWithHandler(name);
4143  }
4144  return GetPropertyAttribute(name) != ABSENT;
4145}
4146
4147
4148bool JSReceiver::HasLocalProperty(String* name) {
4149  if (IsJSProxy()) {
4150    return JSProxy::cast(this)->HasPropertyWithHandler(name);
4151  }
4152  return GetLocalPropertyAttribute(name) != ABSENT;
4153}
4154
4155
4156PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
4157  return GetPropertyAttributeWithReceiver(this, key);
4158}
4159
4160// TODO(504): this may be useful in other places too where JSGlobalProxy
4161// is used.
4162Object* JSObject::BypassGlobalProxy() {
4163  if (IsJSGlobalProxy()) {
4164    Object* proto = GetPrototype();
4165    if (proto->IsNull()) return GetHeap()->undefined_value();
4166    ASSERT(proto->IsJSGlobalObject());
4167    return proto;
4168  }
4169  return this;
4170}
4171
4172
4173bool JSObject::HasHiddenPropertiesObject() {
4174  ASSERT(!IsJSGlobalProxy());
4175  return GetPropertyAttributePostInterceptor(this,
4176                                             GetHeap()->hidden_symbol(),
4177                                             false) != ABSENT;
4178}
4179
4180
4181Object* JSObject::GetHiddenPropertiesObject() {
4182  ASSERT(!IsJSGlobalProxy());
4183  PropertyAttributes attributes;
4184  // You can't install a getter on a property indexed by the hidden symbol,
4185  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
4186  // object.
4187  Object* result =
4188      GetLocalPropertyPostInterceptor(this,
4189                                      GetHeap()->hidden_symbol(),
4190                                      &attributes)->ToObjectUnchecked();
4191  return result;
4192}
4193
4194
4195MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
4196  ASSERT(!IsJSGlobalProxy());
4197  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
4198                                    hidden_obj,
4199                                    DONT_ENUM,
4200                                    kNonStrictMode);
4201}
4202
4203
4204bool JSObject::HasElement(uint32_t index) {
4205  return HasElementWithReceiver(this, index);
4206}
4207
4208
4209bool AccessorInfo::all_can_read() {
4210  return BooleanBit::get(flag(), kAllCanReadBit);
4211}
4212
4213
4214void AccessorInfo::set_all_can_read(bool value) {
4215  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4216}
4217
4218
4219bool AccessorInfo::all_can_write() {
4220  return BooleanBit::get(flag(), kAllCanWriteBit);
4221}
4222
4223
4224void AccessorInfo::set_all_can_write(bool value) {
4225  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4226}
4227
4228
4229bool AccessorInfo::prohibits_overwriting() {
4230  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4231}
4232
4233
4234void AccessorInfo::set_prohibits_overwriting(bool value) {
4235  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4236}
4237
4238
4239PropertyAttributes AccessorInfo::property_attributes() {
4240  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4241}
4242
4243
4244void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4245  ASSERT(AttributesField::is_valid(attributes));
4246  int rest_value = flag()->value() & ~AttributesField::mask();
4247  set_flag(Smi::FromInt(rest_value | AttributesField::encode(attributes)));
4248}
4249
4250
4251template<typename Shape, typename Key>
4252void Dictionary<Shape, Key>::SetEntry(int entry,
4253                                      Object* key,
4254                                      Object* value) {
4255  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4256}
4257
4258
// Writes a (key, value, details) triple into the dictionary slot for
// |entry|.  Each entry occupies three consecutive FixedArray elements
// starting at EntryToIndex(entry).
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  // String keys must carry an enumeration index unless deleted.
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  // Compute the write barrier mode once; no allocation may happen while
  // the cached mode is in use.
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  // Details are stored as a Smi via fast_set (no write barrier needed).
  FixedArray::fast_set(this, index+2, details.AsSmi());
}
4272
4273
4274bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4275  ASSERT(other->IsNumber());
4276  return key == static_cast<uint32_t>(other->Number());
4277}
4278
4279
// Hash for a numeric dictionary key.
uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}
4283
4284
4285uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
4286  ASSERT(other->IsNumber());
4287  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
4288}
4289
4290
4291MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4292  return Isolate::Current()->heap()->NumberFromUint32(key);
4293}
4294
4295
4296bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4297  // We know that all entries in a hash table had their hash keys created.
4298  // Use that knowledge to have fast failure.
4299  if (key->Hash() != String::cast(other)->Hash()) return false;
4300  return key->Equals(String::cast(other));
4301}
4302
4303
// Hash for a string dictionary key (the string's own cached hash).
uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}
4307
4308
4309uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4310  return String::cast(other)->Hash();
4311}
4312
4313
// Strings are already heap objects, so the key is usable directly.
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
4317
4318
// Resets this map's code cache to the shared empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  // The raw_unchecked_ accessor is used because the heap may be in an
  // inconsistent state during marking.
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
4326
4327
4328void JSArray::EnsureSize(int required_size) {
4329  ASSERT(HasFastElements());
4330  FixedArray* elts = FixedArray::cast(elements());
4331  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4332  if (elts->length() < required_size) {
4333    // Doubling in size would be overkill, but leave some slack to avoid
4334    // constantly growing.
4335    Expand(required_size + (required_size >> 3));
4336    // It's a performance benefit to keep a frequently used array in new-space.
4337  } else if (!GetHeap()->new_space()->Contains(elts) &&
4338             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4339    // Expand will allocate a new backing store in new space even if the size
4340    // we asked for isn't larger than what we had before.
4341    Expand(required_size);
4342  }
4343}
4344
4345
4346void JSArray::set_length(Smi* length) {
4347  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4348}
4349
4350
4351void JSArray::SetContent(FixedArray* storage) {
4352  set_length(Smi::FromInt(storage->length()));
4353  set_elements(storage);
4354}
4355
4356
4357MaybeObject* FixedArray::Copy() {
4358  if (length() == 0) return this;
4359  return GetHeap()->CopyFixedArray(this);
4360}
4361
4362
// Links this object onto the isolate's intrusive LIFO list of
// Relocatables.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
4369
4370
// Unlinks this object.  Construction/destruction must nest strictly
// (LIFO), which the ASSERT_EQ checks.
Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
4376
4377
// The size of a JSObject body is fully determined by its map.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
4381
4382
4383void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4384  v->VisitExternalReference(
4385      reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
4386}
4387
4388
4389template<typename StaticVisitor>
4390void Foreign::ForeignIterateBody() {
4391  StaticVisitor::VisitExternalReference(
4392      reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
4393}
4394
4395
4396void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4397  typedef v8::String::ExternalAsciiStringResource Resource;
4398  v->VisitExternalAsciiString(
4399      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4400}
4401
4402
4403template<typename StaticVisitor>
4404void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4405  typedef v8::String::ExternalAsciiStringResource Resource;
4406  StaticVisitor::VisitExternalAsciiString(
4407      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4408}
4409
4410
4411void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4412  typedef v8::String::ExternalStringResource Resource;
4413  v->VisitExternalTwoByteString(
4414      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4415}
4416
4417
4418template<typename StaticVisitor>
4419void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4420  typedef v8::String::ExternalStringResource Resource;
4421  StaticVisitor::VisitExternalTwoByteString(
4422      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4423}
4424
// Address of the Object* slot |offset| bytes into |obj|.
#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

// Visits the pointer fields of a fixed-size object body, i.e. the slots
// between start_offset and end_offset.
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
    v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


// Visits the pointer fields of a variable-size object body, i.e. the
// slots between start_offset and the object's actual size.
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR
4444
4445
4446#undef CAST_ACCESSOR
4447#undef INT_ACCESSORS
4448#undef SMI_ACCESSORS
4449#undef ACCESSORS
4450#undef FIELD_ADDR
4451#undef READ_FIELD
4452#undef WRITE_FIELD
4453#undef WRITE_BARRIER
4454#undef CONDITIONAL_WRITE_BARRIER
4455#undef READ_MEMADDR_FIELD
4456#undef WRITE_MEMADDR_FIELD
4457#undef READ_DOUBLE_FIELD
4458#undef WRITE_DOUBLE_FIELD
4459#undef READ_INT_FIELD
4460#undef WRITE_INT_FIELD
4461#undef READ_SHORT_FIELD
4462#undef WRITE_SHORT_FIELD
4463#undef READ_BYTE_FIELD
4464#undef WRITE_BYTE_FIELD
4465
4466
4467} }  // namespace v8::internal
4468
4469#endif  // V8_OBJECTS_INL_H_
4470