1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4//
5// Review notes:
6//
7// - The use of macros in these inline functions may seem superfluous
8// but it is absolutely needed to make sure gcc generates optimal
9// code. gcc is not happy when attempting to inline too deep.
10//
11
12#ifndef V8_OBJECTS_INL_H_
13#define V8_OBJECTS_INL_H_
14
15#include "src/base/atomicops.h"
16#include "src/base/bits.h"
17#include "src/builtins/builtins.h"
18#include "src/contexts-inl.h"
19#include "src/conversions-inl.h"
20#include "src/factory.h"
21#include "src/feedback-vector-inl.h"
22#include "src/field-index-inl.h"
23#include "src/field-type.h"
24#include "src/handles-inl.h"
25#include "src/heap/heap-inl.h"
26#include "src/heap/heap.h"
27#include "src/isolate-inl.h"
28#include "src/isolate.h"
29#include "src/keys.h"
30#include "src/layout-descriptor-inl.h"
31#include "src/lookup-cache-inl.h"
32#include "src/lookup.h"
33#include "src/objects.h"
34#include "src/objects/literal-objects.h"
35#include "src/objects/module-info.h"
36#include "src/objects/regexp-match-info.h"
37#include "src/objects/scope-info.h"
38#include "src/property.h"
39#include "src/prototype.h"
40#include "src/transitions-inl.h"
41#include "src/v8memory.h"
42
43namespace v8 {
44namespace internal {
45
46PropertyDetails::PropertyDetails(Smi* smi) {
47  value_ = smi->value();
48}
49
50
51Smi* PropertyDetails::AsSmi() const {
52  // Ensure the upper 2 bits have the same value by sign extending it. This is
53  // necessary to be able to use the 31st bit of the property details.
54  int value = value_ << 1;
55  return Smi::FromInt(value >> 1);
56}
57
58
59int PropertyDetails::field_width_in_words() const {
60  DCHECK(location() == kField);
61  if (!FLAG_unbox_double_fields) return 1;
62  if (kDoubleSize == kPointerSize) return 1;
63  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
64}
65
66#define INT_ACCESSORS(holder, name, offset)                                   \
67  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
68  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
69
70#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
71                           set_condition)                             \
72  type* holder::name() const {                                        \
73    DCHECK(get_condition);                                            \
74    return type::cast(READ_FIELD(this, offset));                      \
75  }                                                                   \
76  void holder::set_##name(type* value, WriteBarrierMode mode) {       \
77    DCHECK(set_condition);                                            \
78    WRITE_FIELD(this, offset, value);                                 \
79    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);  \
80  }
81#define ACCESSORS_CHECKED(holder, name, type, offset, condition) \
82  ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)
83
84#define ACCESSORS(holder, name, type, offset) \
85  ACCESSORS_CHECKED(holder, name, type, offset, true)
86
87// Getter that returns a Smi as an int and writes an int as a Smi.
88#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition) \
89  int holder::name() const {                                   \
90    DCHECK(condition);                                         \
91    Object* value = READ_FIELD(this, offset);                  \
92    return Smi::cast(value)->value();                          \
93  }                                                            \
94  void holder::set_##name(int value) {                         \
95    DCHECK(condition);                                         \
96    WRITE_FIELD(this, offset, Smi::FromInt(value));            \
97  }
98
99#define SMI_ACCESSORS(holder, name, offset) \
100  SMI_ACCESSORS_CHECKED(holder, name, offset, true)
101
102#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
103  int holder::synchronized_##name() const {                 \
104    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
105    return Smi::cast(value)->value();                       \
106  }                                                         \
107  void holder::synchronized_set_##name(int value) {         \
108    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
109  }
110
111#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)          \
112  int holder::nobarrier_##name() const {                       \
113    Object* value = NOBARRIER_READ_FIELD(this, offset);        \
114    return Smi::cast(value)->value();                          \
115  }                                                            \
116  void holder::nobarrier_set_##name(int value) {               \
117    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value));  \
118  }
119
120#define BOOL_GETTER(holder, field, name, offset)           \
121  bool holder::name() const {                              \
122    return BooleanBit::get(field(), offset);               \
123  }                                                        \
124
125
126#define BOOL_ACCESSORS(holder, field, name, offset)        \
127  bool holder::name() const {                              \
128    return BooleanBit::get(field(), offset);               \
129  }                                                        \
130  void holder::set_##name(bool value) {                    \
131    set_##field(BooleanBit::set(field(), offset, value));  \
132  }
133
134#define TYPE_CHECKER(type, instancetype)           \
135  bool HeapObject::Is##type() const {              \
136    return map()->instance_type() == instancetype; \
137  }
138
139TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
140TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
141TYPE_CHECKER(Cell, CELL_TYPE)
142TYPE_CHECKER(Code, CODE_TYPE)
143TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
144TYPE_CHECKER(Foreign, FOREIGN_TYPE)
145TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
146TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
147TYPE_CHECKER(JSArgumentsObject, JS_ARGUMENTS_TYPE)
148TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
149TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
150TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
151TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
152TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
153TYPE_CHECKER(JSDate, JS_DATE_TYPE)
154TYPE_CHECKER(JSError, JS_ERROR_TYPE)
155TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
156TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
157TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
158TYPE_CHECKER(JSMap, JS_MAP_TYPE)
159TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
160TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
161TYPE_CHECKER(JSModuleNamespace, JS_MODULE_NAMESPACE_TYPE)
162TYPE_CHECKER(JSPromiseCapability, JS_PROMISE_CAPABILITY_TYPE)
163TYPE_CHECKER(JSPromise, JS_PROMISE_TYPE)
164TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
165TYPE_CHECKER(JSSet, JS_SET_TYPE)
166TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
167TYPE_CHECKER(JSAsyncFromSyncIterator, JS_ASYNC_FROM_SYNC_ITERATOR_TYPE)
168TYPE_CHECKER(JSStringIterator, JS_STRING_ITERATOR_TYPE)
169TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
170TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
171TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
172TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
173TYPE_CHECKER(Map, MAP_TYPE)
174TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
175TYPE_CHECKER(Oddball, ODDBALL_TYPE)
176TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
177TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
178TYPE_CHECKER(Symbol, SYMBOL_TYPE)
179TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
180TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
181TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
182
183#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
184  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
185TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
186#undef TYPED_ARRAY_TYPE_CHECKER
187
188#undef TYPE_CHECKER
189
190bool HeapObject::IsFixedArrayBase() const {
191  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
192}
193
194bool HeapObject::IsFixedArray() const {
195  InstanceType instance_type = map()->instance_type();
196  return instance_type == FIXED_ARRAY_TYPE ||
197         instance_type == TRANSITION_ARRAY_TYPE;
198}
199
200bool HeapObject::IsBoilerplateDescription() const { return IsFixedArray(); }
201
202// External objects are not extensible, so the map check is enough.
203bool HeapObject::IsExternal() const {
204  return map() == GetHeap()->external_map();
205}
206
207#define IS_TYPE_FUNCTION_DEF(type_)                               \
208  bool Object::Is##type_() const {                                \
209    return IsHeapObject() && HeapObject::cast(this)->Is##type_(); \
210  }
211HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
212#undef IS_TYPE_FUNCTION_DEF
213
214#define IS_TYPE_FUNCTION_DEF(Type, Value)             \
215  bool Object::Is##Type(Isolate* isolate) const {     \
216    return this == isolate->heap()->Value();          \
217  }                                                   \
218  bool HeapObject::Is##Type(Isolate* isolate) const { \
219    return this == isolate->heap()->Value();          \
220  }
221ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
222#undef IS_TYPE_FUNCTION_DEF
223
224bool Object::IsNullOrUndefined(Isolate* isolate) const {
225  Heap* heap = isolate->heap();
226  return this == heap->null_value() || this == heap->undefined_value();
227}
228
229bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
230  Heap* heap = isolate->heap();
231  return this == heap->null_value() || this == heap->undefined_value();
232}
233
234bool HeapObject::IsString() const {
235  return map()->instance_type() < FIRST_NONSTRING_TYPE;
236}
237
238bool HeapObject::IsName() const {
239  return map()->instance_type() <= LAST_NAME_TYPE;
240}
241
242bool HeapObject::IsUniqueName() const {
243  return IsInternalizedString() || IsSymbol();
244}
245
246bool Name::IsUniqueName() const {
247  uint32_t type = map()->instance_type();
248  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
249         (kStringTag | kNotInternalizedTag);
250}
251
252bool HeapObject::IsFunction() const {
253  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
254  return map()->instance_type() >= FIRST_FUNCTION_TYPE;
255}
256
257bool HeapObject::IsCallable() const { return map()->is_callable(); }
258
259bool HeapObject::IsConstructor() const { return map()->is_constructor(); }
260
261bool HeapObject::IsTemplateInfo() const {
262  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
263}
264
265bool HeapObject::IsInternalizedString() const {
266  uint32_t type = map()->instance_type();
267  STATIC_ASSERT(kNotInternalizedTag != 0);
268  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
269      (kStringTag | kInternalizedTag);
270}
271
272bool HeapObject::IsConsString() const {
273  if (!IsString()) return false;
274  return StringShape(String::cast(this)).IsCons();
275}
276
277bool HeapObject::IsThinString() const {
278  if (!IsString()) return false;
279  return StringShape(String::cast(this)).IsThin();
280}
281
282bool HeapObject::IsSlicedString() const {
283  if (!IsString()) return false;
284  return StringShape(String::cast(this)).IsSliced();
285}
286
287bool HeapObject::IsSeqString() const {
288  if (!IsString()) return false;
289  return StringShape(String::cast(this)).IsSequential();
290}
291
292bool HeapObject::IsSeqOneByteString() const {
293  if (!IsString()) return false;
294  return StringShape(String::cast(this)).IsSequential() &&
295         String::cast(this)->IsOneByteRepresentation();
296}
297
298bool HeapObject::IsSeqTwoByteString() const {
299  if (!IsString()) return false;
300  return StringShape(String::cast(this)).IsSequential() &&
301         String::cast(this)->IsTwoByteRepresentation();
302}
303
304bool HeapObject::IsExternalString() const {
305  if (!IsString()) return false;
306  return StringShape(String::cast(this)).IsExternal();
307}
308
309bool HeapObject::IsExternalOneByteString() const {
310  if (!IsString()) return false;
311  return StringShape(String::cast(this)).IsExternal() &&
312         String::cast(this)->IsOneByteRepresentation();
313}
314
315bool HeapObject::IsExternalTwoByteString() const {
316  if (!IsString()) return false;
317  return StringShape(String::cast(this)).IsExternal() &&
318         String::cast(this)->IsTwoByteRepresentation();
319}
320
321bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }
322
323bool HeapObject::IsFiller() const {
324  InstanceType instance_type = map()->instance_type();
325  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
326}
327
328bool HeapObject::IsFixedTypedArrayBase() const {
329  InstanceType instance_type = map()->instance_type();
330  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
331          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
332}
333
334bool HeapObject::IsJSReceiver() const {
335  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
336  return map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
337}
338
339bool HeapObject::IsJSObject() const {
340  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
341  return map()->IsJSObjectMap();
342}
343
344bool HeapObject::IsJSProxy() const { return map()->IsJSProxyMap(); }
345
346bool HeapObject::IsJSArrayIterator() const {
347  InstanceType instance_type = map()->instance_type();
348  return (instance_type >= FIRST_ARRAY_ITERATOR_TYPE &&
349          instance_type <= LAST_ARRAY_ITERATOR_TYPE);
350}
351
352bool HeapObject::IsJSWeakCollection() const {
353  return IsJSWeakMap() || IsJSWeakSet();
354}
355
356bool HeapObject::IsJSCollection() const { return IsJSMap() || IsJSSet(); }
357
358bool HeapObject::IsDescriptorArray() const { return IsFixedArray(); }
359
360bool HeapObject::IsFrameArray() const { return IsFixedArray(); }
361
362bool HeapObject::IsArrayList() const { return IsFixedArray(); }
363
364bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArray(); }
365
366bool Object::IsLayoutDescriptor() const {
367  return IsSmi() || IsFixedTypedArrayBase();
368}
369
370bool HeapObject::IsFeedbackVector() const {
371  return map() == GetHeap()->feedback_vector_map();
372}
373
374bool HeapObject::IsFeedbackMetadata() const { return IsFixedArray(); }
375
376bool HeapObject::IsDeoptimizationInputData() const {
377  // Must be a fixed array.
378  if (!IsFixedArray()) return false;
379
380  // There's no sure way to detect the difference between a fixed array and
381  // a deoptimization data array.  Since this is used for asserts we can
382  // check that the length is zero or else the fixed size plus a multiple of
383  // the entry size.
384  int length = FixedArray::cast(this)->length();
385  if (length == 0) return true;
386
387  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
388  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
389}
390
391bool HeapObject::IsDeoptimizationOutputData() const {
392  if (!IsFixedArray()) return false;
393  // There's actually no way to see the difference between a fixed array and
394  // a deoptimization data array.  Since this is used for asserts we can check
395  // that the length is plausible though.
396  if (FixedArray::cast(this)->length() % 2 != 0) return false;
397  return true;
398}
399
400bool HeapObject::IsHandlerTable() const {
401  if (!IsFixedArray()) return false;
402  // There's actually no way to see the difference between a fixed array and
403  // a handler table array.
404  return true;
405}
406
407bool HeapObject::IsTemplateList() const {
408  if (!IsFixedArray()) return false;
409  // There's actually no way to see the difference between a fixed array and
410  // a template list.
411  if (FixedArray::cast(this)->length() < 1) return false;
412  return true;
413}
414
415bool HeapObject::IsDependentCode() const {
416  if (!IsFixedArray()) return false;
417  // There's actually no way to see the difference between a fixed array and
418  // a dependent codes array.
419  return true;
420}
421
422bool HeapObject::IsContext() const {
423  Map* map = this->map();
424  Heap* heap = GetHeap();
425  return (
426      map == heap->function_context_map() || map == heap->catch_context_map() ||
427      map == heap->with_context_map() || map == heap->native_context_map() ||
428      map == heap->block_context_map() || map == heap->module_context_map() ||
429      map == heap->eval_context_map() || map == heap->script_context_map() ||
430      map == heap->debug_evaluate_context_map());
431}
432
433bool HeapObject::IsNativeContext() const {
434  return map() == GetHeap()->native_context_map();
435}
436
437bool HeapObject::IsScriptContextTable() const {
438  return map() == GetHeap()->script_context_table_map();
439}
440
441bool HeapObject::IsScopeInfo() const {
442  return map() == GetHeap()->scope_info_map();
443}
444
445bool HeapObject::IsModuleInfo() const {
446  return map() == GetHeap()->module_info_map();
447}
448
449template <>
450inline bool Is<JSFunction>(Object* obj) {
451  return obj->IsJSFunction();
452}
453
454bool HeapObject::IsAbstractCode() const {
455  return IsBytecodeArray() || IsCode();
456}
457
458bool HeapObject::IsStringWrapper() const {
459  return IsJSValue() && JSValue::cast(this)->value()->IsString();
460}
461
462bool HeapObject::IsBoolean() const {
463  return IsOddball() &&
464         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
465}
466
467bool HeapObject::IsJSArrayBufferView() const {
468  return IsJSDataView() || IsJSTypedArray();
469}
470
471template <>
472inline bool Is<JSArray>(Object* obj) {
473  return obj->IsJSArray();
474}
475
476bool HeapObject::IsHashTable() const {
477  return map() == GetHeap()->hash_table_map();
478}
479
480bool HeapObject::IsWeakHashTable() const { return IsHashTable(); }
481
482bool HeapObject::IsDictionary() const {
483  return IsHashTable() && this != GetHeap()->string_table();
484}
485
486bool Object::IsNameDictionary() const { return IsDictionary(); }
487
488bool Object::IsGlobalDictionary() const { return IsDictionary(); }
489
490bool Object::IsSeededNumberDictionary() const { return IsDictionary(); }
491
492bool HeapObject::IsUnseededNumberDictionary() const {
493  return map() == GetHeap()->unseeded_number_dictionary_map();
494}
495
496bool HeapObject::IsStringTable() const { return IsHashTable(); }
497
498bool HeapObject::IsStringSet() const { return IsHashTable(); }
499
500bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }
501
502bool HeapObject::IsNormalizedMapCache() const {
503  return NormalizedMapCache::IsNormalizedMapCache(this);
504}
505
506int NormalizedMapCache::GetIndex(Handle<Map> map) {
507  return map->Hash() % NormalizedMapCache::kEntries;
508}
509
510bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
511  if (!obj->IsFixedArray()) return false;
512  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
513    return false;
514  }
515#ifdef VERIFY_HEAP
516  if (FLAG_verify_heap) {
517    reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj))
518        ->NormalizedMapCacheVerify();
519  }
520#endif
521  return true;
522}
523
524bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }
525
526bool HeapObject::IsCodeCacheHashTable() const { return IsHashTable(); }
527
528bool HeapObject::IsMapCache() const { return IsHashTable(); }
529
530bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }
531
532bool HeapObject::IsOrderedHashTable() const {
533  return map() == GetHeap()->ordered_hash_table_map();
534}
535
536bool Object::IsOrderedHashSet() const { return IsOrderedHashTable(); }
537
538bool Object::IsOrderedHashMap() const { return IsOrderedHashTable(); }
539
540bool Object::IsPrimitive() const {
541  return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
542}
543
544bool HeapObject::IsJSGlobalProxy() const {
545  bool result = map()->instance_type() == JS_GLOBAL_PROXY_TYPE;
546  DCHECK(!result || map()->is_access_check_needed());
547  return result;
548}
549
550bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }
551
552bool HeapObject::IsAccessCheckNeeded() const {
553  if (IsJSGlobalProxy()) {
554    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
555    JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
556    return proxy->IsDetachedFrom(global);
557  }
558  return map()->is_access_check_needed();
559}
560
561bool HeapObject::IsStruct() const {
562  switch (map()->instance_type()) {
563#define MAKE_STRUCT_CASE(NAME, Name, name) \
564  case NAME##_TYPE:                        \
565    return true;
566    STRUCT_LIST(MAKE_STRUCT_CASE)
567#undef MAKE_STRUCT_CASE
568    default:
569      return false;
570  }
571}
572
573#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
574  bool Object::Is##Name() const {                                \
575    return IsHeapObject() && HeapObject::cast(this)->Is##Name(); \
576  }                                                              \
577  bool HeapObject::Is##Name() const {                            \
578    return map()->instance_type() == NAME##_TYPE;                \
579  }
580STRUCT_LIST(MAKE_STRUCT_PREDICATE)
581#undef MAKE_STRUCT_PREDICATE
582
583double Object::Number() const {
584  DCHECK(IsNumber());
585  return IsSmi()
586             ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
587             : reinterpret_cast<const HeapNumber*>(this)->value();
588}
589
590bool Object::IsNaN() const {
591  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
592}
593
594bool Object::IsMinusZero() const {
595  return this->IsHeapNumber() &&
596         i::IsMinusZero(HeapNumber::cast(this)->value());
597}
598
599// ------------------------------------
600// Cast operations
601
602#define CAST_ACCESSOR(type)                       \
603  type* type::cast(Object* object) {              \
604    SLOW_DCHECK(object->Is##type());              \
605    return reinterpret_cast<type*>(object);       \
606  }                                               \
607  const type* type::cast(const Object* object) {  \
608    SLOW_DCHECK(object->Is##type());              \
609    return reinterpret_cast<const type*>(object); \
610  }
611
612CAST_ACCESSOR(AbstractCode)
613CAST_ACCESSOR(ArrayList)
614CAST_ACCESSOR(BoilerplateDescription)
615CAST_ACCESSOR(ByteArray)
616CAST_ACCESSOR(BytecodeArray)
617CAST_ACCESSOR(Cell)
618CAST_ACCESSOR(Code)
619CAST_ACCESSOR(CodeCacheHashTable)
620CAST_ACCESSOR(CompilationCacheTable)
621CAST_ACCESSOR(ConsString)
622CAST_ACCESSOR(DeoptimizationInputData)
623CAST_ACCESSOR(DeoptimizationOutputData)
624CAST_ACCESSOR(DependentCode)
625CAST_ACCESSOR(DescriptorArray)
626CAST_ACCESSOR(ExternalOneByteString)
627CAST_ACCESSOR(ExternalString)
628CAST_ACCESSOR(ExternalTwoByteString)
629CAST_ACCESSOR(FixedArray)
630CAST_ACCESSOR(FixedArrayBase)
631CAST_ACCESSOR(FixedDoubleArray)
632CAST_ACCESSOR(FixedTypedArrayBase)
633CAST_ACCESSOR(Foreign)
634CAST_ACCESSOR(FrameArray)
635CAST_ACCESSOR(GlobalDictionary)
636CAST_ACCESSOR(HandlerTable)
637CAST_ACCESSOR(HeapObject)
638CAST_ACCESSOR(JSArray)
639CAST_ACCESSOR(JSArrayBuffer)
640CAST_ACCESSOR(JSArrayBufferView)
641CAST_ACCESSOR(JSBoundFunction)
642CAST_ACCESSOR(JSDataView)
643CAST_ACCESSOR(JSDate)
644CAST_ACCESSOR(JSFunction)
645CAST_ACCESSOR(JSGeneratorObject)
646CAST_ACCESSOR(JSGlobalObject)
647CAST_ACCESSOR(JSGlobalProxy)
648CAST_ACCESSOR(JSMap)
649CAST_ACCESSOR(JSMapIterator)
650CAST_ACCESSOR(JSMessageObject)
651CAST_ACCESSOR(JSModuleNamespace)
652CAST_ACCESSOR(JSObject)
653CAST_ACCESSOR(JSProxy)
654CAST_ACCESSOR(JSReceiver)
655CAST_ACCESSOR(JSRegExp)
656CAST_ACCESSOR(JSPromiseCapability)
657CAST_ACCESSOR(JSPromise)
658CAST_ACCESSOR(JSSet)
659CAST_ACCESSOR(JSSetIterator)
660CAST_ACCESSOR(JSAsyncFromSyncIterator)
661CAST_ACCESSOR(JSStringIterator)
662CAST_ACCESSOR(JSArrayIterator)
663CAST_ACCESSOR(JSTypedArray)
664CAST_ACCESSOR(JSValue)
665CAST_ACCESSOR(JSWeakCollection)
666CAST_ACCESSOR(JSWeakMap)
667CAST_ACCESSOR(JSWeakSet)
668CAST_ACCESSOR(LayoutDescriptor)
669CAST_ACCESSOR(Map)
670CAST_ACCESSOR(ModuleInfo)
671CAST_ACCESSOR(Name)
672CAST_ACCESSOR(NameDictionary)
673CAST_ACCESSOR(NormalizedMapCache)
674CAST_ACCESSOR(Object)
675CAST_ACCESSOR(ObjectHashTable)
676CAST_ACCESSOR(ObjectHashSet)
677CAST_ACCESSOR(Oddball)
678CAST_ACCESSOR(OrderedHashMap)
679CAST_ACCESSOR(OrderedHashSet)
680CAST_ACCESSOR(PropertyCell)
681CAST_ACCESSOR(TemplateList)
682CAST_ACCESSOR(RegExpMatchInfo)
683CAST_ACCESSOR(ScopeInfo)
684CAST_ACCESSOR(SeededNumberDictionary)
685CAST_ACCESSOR(SeqOneByteString)
686CAST_ACCESSOR(SeqString)
687CAST_ACCESSOR(SeqTwoByteString)
688CAST_ACCESSOR(SharedFunctionInfo)
689CAST_ACCESSOR(SlicedString)
690CAST_ACCESSOR(Smi)
691CAST_ACCESSOR(String)
692CAST_ACCESSOR(StringSet)
693CAST_ACCESSOR(StringTable)
694CAST_ACCESSOR(Struct)
695CAST_ACCESSOR(Symbol)
696CAST_ACCESSOR(TemplateInfo)
697CAST_ACCESSOR(ThinString)
698CAST_ACCESSOR(UnseededNumberDictionary)
699CAST_ACCESSOR(WeakCell)
700CAST_ACCESSOR(WeakFixedArray)
701CAST_ACCESSOR(WeakHashTable)
702
703#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
704STRUCT_LIST(MAKE_STRUCT_CAST)
705#undef MAKE_STRUCT_CAST
706
707#undef CAST_ACCESSOR
708
709bool Object::HasValidElements() {
710  // Dictionary is covered under FixedArray.
711  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
712}
713
714bool Object::KeyEquals(Object* second) {
715  Object* first = this;
716  if (second->IsNumber()) {
717    if (first->IsNumber()) return first->Number() == second->Number();
718    Object* temp = first;
719    first = second;
720    second = temp;
721  }
722  if (first->IsNumber()) {
723    DCHECK_LE(0, first->Number());
724    uint32_t expected = static_cast<uint32_t>(first->Number());
725    uint32_t index;
726    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
727  }
728  return Name::cast(first)->Equals(Name::cast(second));
729}
730
731bool Object::FilterKey(PropertyFilter filter) {
732  if (IsSymbol()) {
733    if (filter & SKIP_SYMBOLS) return true;
734    if (Symbol::cast(this)->is_private()) return true;
735  } else {
736    if (filter & SKIP_STRINGS) return true;
737  }
738  return false;
739}
740
741Handle<Object> Object::NewStorageFor(Isolate* isolate, Handle<Object> object,
742                                     Representation representation) {
743  if (!representation.IsDouble()) return object;
744  Handle<HeapNumber> result = isolate->factory()->NewHeapNumber(MUTABLE);
745  if (object->IsUninitialized(isolate)) {
746    result->set_value_as_bits(kHoleNanInt64);
747  } else if (object->IsMutableHeapNumber()) {
748    // Ensure that all bits of the double value are preserved.
749    result->set_value_as_bits(HeapNumber::cast(*object)->value_as_bits());
750  } else {
751    result->set_value(object->Number());
752  }
753  return result;
754}
755
756Handle<Object> Object::WrapForRead(Isolate* isolate, Handle<Object> object,
757                                   Representation representation) {
758  DCHECK(!object->IsUninitialized(isolate));
759  if (!representation.IsDouble()) {
760    DCHECK(object->FitsRepresentation(representation));
761    return object;
762  }
763  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
764}
765
766StringShape::StringShape(const String* str)
767    : type_(str->map()->instance_type()) {
768  set_valid();
769  DCHECK((type_ & kIsNotStringMask) == kStringTag);
770}
771
772StringShape::StringShape(Map* map) : type_(map->instance_type()) {
773  set_valid();
774  DCHECK((type_ & kIsNotStringMask) == kStringTag);
775}
776
777StringShape::StringShape(InstanceType t) : type_(static_cast<uint32_t>(t)) {
778  set_valid();
779  DCHECK((type_ & kIsNotStringMask) == kStringTag);
780}
781
782bool StringShape::IsInternalized() {
783  DCHECK(valid());
784  STATIC_ASSERT(kNotInternalizedTag != 0);
785  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
786         (kStringTag | kInternalizedTag);
787}
788
789bool String::IsOneByteRepresentation() const {
790  uint32_t type = map()->instance_type();
791  return (type & kStringEncodingMask) == kOneByteStringTag;
792}
793
794bool String::IsTwoByteRepresentation() const {
795  uint32_t type = map()->instance_type();
796  return (type & kStringEncodingMask) == kTwoByteStringTag;
797}
798
799bool String::IsOneByteRepresentationUnderneath() {
800  uint32_t type = map()->instance_type();
801  STATIC_ASSERT(kIsIndirectStringTag != 0);
802  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
803  DCHECK(IsFlat());
804  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
805    case kOneByteStringTag:
806      return true;
807    case kTwoByteStringTag:
808      return false;
809    default:  // Cons or sliced string.  Need to go deeper.
810      return GetUnderlying()->IsOneByteRepresentation();
811  }
812}
813
814bool String::IsTwoByteRepresentationUnderneath() {
815  uint32_t type = map()->instance_type();
816  STATIC_ASSERT(kIsIndirectStringTag != 0);
817  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
818  DCHECK(IsFlat());
819  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
820    case kOneByteStringTag:
821      return false;
822    case kTwoByteStringTag:
823      return true;
824    default:  // Cons or sliced string.  Need to go deeper.
825      return GetUnderlying()->IsTwoByteRepresentation();
826  }
827}
828
829bool String::HasOnlyOneByteChars() {
830  uint32_t type = map()->instance_type();
831  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
832         IsOneByteRepresentation();
833}
834
835bool StringShape::IsCons() {
836  return (type_ & kStringRepresentationMask) == kConsStringTag;
837}
838
839bool StringShape::IsThin() {
840  return (type_ & kStringRepresentationMask) == kThinStringTag;
841}
842
843bool StringShape::IsSliced() {
844  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
845}
846
847bool StringShape::IsIndirect() {
848  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
849}
850
851bool StringShape::IsExternal() {
852  return (type_ & kStringRepresentationMask) == kExternalStringTag;
853}
854
855bool StringShape::IsSequential() {
856  return (type_ & kStringRepresentationMask) == kSeqStringTag;
857}
858
859StringRepresentationTag StringShape::representation_tag() {
860  uint32_t tag = (type_ & kStringRepresentationMask);
861  return static_cast<StringRepresentationTag>(tag);
862}
863
864uint32_t StringShape::encoding_tag() { return type_ & kStringEncodingMask; }
865
866uint32_t StringShape::full_representation_tag() {
867  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
868}
869
870STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
871              Internals::kFullStringRepresentationMask);
872
873STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
874              Internals::kStringEncodingMask);
875
876bool StringShape::IsSequentialOneByte() {
877  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
878}
879
880bool StringShape::IsSequentialTwoByte() {
881  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
882}
883
884bool StringShape::IsExternalOneByte() {
885  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
886}
887
888STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
889              Internals::kExternalOneByteRepresentationTag);
890
891STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);
892
893bool StringShape::IsExternalTwoByte() {
894  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
895}
896
897STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
898              Internals::kExternalTwoByteRepresentationTag);
899
900STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
901
902uc32 FlatStringReader::Get(int index) {
903  if (is_one_byte_) {
904    return Get<uint8_t>(index);
905  } else {
906    return Get<uc16>(index);
907  }
908}
909
910template <typename Char>
911Char FlatStringReader::Get(int index) {
912  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
913  DCHECK(0 <= index && index <= length_);
914  if (sizeof(Char) == 1) {
915    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
916  } else {
917    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
918  }
919}
920
921Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
922  return key->AsHandle(isolate);
923}
924
925Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
926                                               HashTableKey* key) {
927  return key->AsHandle(isolate);
928}
929
930Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
931                                                 HashTableKey* key) {
932  return key->AsHandle(isolate);
933}
934
935template <typename Char>
936class SequentialStringKey : public HashTableKey {
937 public:
938  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
939      : string_(string), hash_field_(0), seed_(seed) {}
940
941  uint32_t Hash() override {
942    hash_field_ = StringHasher::HashSequentialString<Char>(
943        string_.start(), string_.length(), seed_);
944
945    uint32_t result = hash_field_ >> String::kHashShift;
946    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
947    return result;
948  }
949
950  uint32_t HashForObject(Object* other) override {
951    return String::cast(other)->Hash();
952  }
953
954  Vector<const Char> string_;
955  uint32_t hash_field_;
956  uint32_t seed_;
957};
958
959class OneByteStringKey : public SequentialStringKey<uint8_t> {
960 public:
961  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
962      : SequentialStringKey<uint8_t>(str, seed) {}
963
964  bool IsMatch(Object* string) override {
965    return String::cast(string)->IsOneByteEqualTo(string_);
966  }
967
968  Handle<Object> AsHandle(Isolate* isolate) override;
969};
970
971class SeqOneByteSubStringKey : public HashTableKey {
972 public:
973  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
974      : string_(string), from_(from), length_(length) {
975    DCHECK(string_->IsSeqOneByteString());
976  }
977
978  uint32_t Hash() override {
979    DCHECK(length_ >= 0);
980    DCHECK(from_ + length_ <= string_->length());
981    const uint8_t* chars = string_->GetChars() + from_;
982    hash_field_ = StringHasher::HashSequentialString(
983        chars, length_, string_->GetHeap()->HashSeed());
984    uint32_t result = hash_field_ >> String::kHashShift;
985    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
986    return result;
987  }
988
989  uint32_t HashForObject(Object* other) override {
990    return String::cast(other)->Hash();
991  }
992
993  bool IsMatch(Object* string) override;
994  Handle<Object> AsHandle(Isolate* isolate) override;
995
996 private:
997  Handle<SeqOneByteString> string_;
998  int from_;
999  int length_;
1000  uint32_t hash_field_;
1001};
1002
1003class TwoByteStringKey : public SequentialStringKey<uc16> {
1004 public:
1005  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
1006      : SequentialStringKey<uc16>(str, seed) {}
1007
1008  bool IsMatch(Object* string) override {
1009    return String::cast(string)->IsTwoByteEqualTo(string_);
1010  }
1011
1012  Handle<Object> AsHandle(Isolate* isolate) override;
1013};
1014
1015// Utf8StringKey carries a vector of chars as key.
1016class Utf8StringKey : public HashTableKey {
1017 public:
1018  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
1019      : string_(string), hash_field_(0), seed_(seed) {}
1020
1021  bool IsMatch(Object* string) override {
1022    return String::cast(string)->IsUtf8EqualTo(string_);
1023  }
1024
1025  uint32_t Hash() override {
1026    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
1027    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
1028    uint32_t result = hash_field_ >> String::kHashShift;
1029    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
1030    return result;
1031  }
1032
1033  uint32_t HashForObject(Object* other) override {
1034    return String::cast(other)->Hash();
1035  }
1036
1037  Handle<Object> AsHandle(Isolate* isolate) override {
1038    if (hash_field_ == 0) Hash();
1039    return isolate->factory()->NewInternalizedStringFromUtf8(string_, chars_,
1040                                                             hash_field_);
1041  }
1042
1043  Vector<const char> string_;
1044  uint32_t hash_field_;
1045  int chars_;  // Caches the number of characters when computing the hash code.
1046  uint32_t seed_;
1047};
1048
1049Representation Object::OptimalRepresentation() {
1050  if (!FLAG_track_fields) return Representation::Tagged();
1051  if (IsSmi()) {
1052    return Representation::Smi();
1053  } else if (FLAG_track_double_fields && IsHeapNumber()) {
1054    return Representation::Double();
1055  } else if (FLAG_track_computed_fields &&
1056             IsUninitialized(HeapObject::cast(this)->GetIsolate())) {
1057    return Representation::None();
1058  } else if (FLAG_track_heap_object_fields) {
1059    DCHECK(IsHeapObject());
1060    return Representation::HeapObject();
1061  } else {
1062    return Representation::Tagged();
1063  }
1064}
1065
1066
1067ElementsKind Object::OptimalElementsKind() {
1068  if (IsSmi()) return FAST_SMI_ELEMENTS;
1069  if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1070  return FAST_ELEMENTS;
1071}
1072
1073
1074bool Object::FitsRepresentation(Representation representation) {
1075  if (FLAG_track_fields && representation.IsSmi()) {
1076    return IsSmi();
1077  } else if (FLAG_track_double_fields && representation.IsDouble()) {
1078    return IsMutableHeapNumber() || IsNumber();
1079  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
1080    return IsHeapObject();
1081  } else if (FLAG_track_fields && representation.IsNone()) {
1082    return false;
1083  }
1084  return true;
1085}
1086
1087bool Object::ToUint32(uint32_t* value) {
1088  if (IsSmi()) {
1089    int num = Smi::cast(this)->value();
1090    if (num < 0) return false;
1091    *value = static_cast<uint32_t>(num);
1092    return true;
1093  }
1094  if (IsHeapNumber()) {
1095    double num = HeapNumber::cast(this)->value();
1096    return DoubleToUint32IfEqualToSelf(num, value);
1097  }
1098  return false;
1099}
1100
1101// static
1102MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1103                                         Handle<Object> object) {
1104  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
1105  return ToObject(isolate, object, isolate->native_context());
1106}
1107
1108
1109// static
1110MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
1111  if (input->IsName()) return Handle<Name>::cast(input);
1112  return ConvertToName(isolate, input);
1113}
1114
1115// static
1116MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
1117                                          Handle<Object> value) {
1118  if (value->IsSmi() || HeapObject::cast(*value)->IsName()) return value;
1119  return ConvertToPropertyKey(isolate, value);
1120}
1121
1122// static
1123MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
1124                                        ToPrimitiveHint hint) {
1125  if (input->IsPrimitive()) return input;
1126  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
1127}
1128
1129// static
1130MaybeHandle<Object> Object::ToNumber(Handle<Object> input) {
1131  if (input->IsNumber()) return input;
1132  return ConvertToNumber(HeapObject::cast(*input)->GetIsolate(), input);
1133}
1134
1135// static
1136MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
1137  if (input->IsSmi()) return input;
1138  return ConvertToInteger(isolate, input);
1139}
1140
1141// static
1142MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
1143  if (input->IsSmi()) return input;
1144  return ConvertToInt32(isolate, input);
1145}
1146
1147// static
1148MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
1149  if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
1150  return ConvertToUint32(isolate, input);
1151}
1152
1153// static
1154MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
1155  if (input->IsString()) return Handle<String>::cast(input);
1156  return ConvertToString(isolate, input);
1157}
1158
1159// static
1160MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
1161  if (input->IsSmi()) {
1162    int value = std::max(Smi::cast(*input)->value(), 0);
1163    return handle(Smi::FromInt(value), isolate);
1164  }
1165  return ConvertToLength(isolate, input);
1166}
1167
1168// static
1169MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
1170                                    MessageTemplate::Template error_index) {
1171  if (input->IsSmi() && Smi::cast(*input)->value() >= 0) return input;
1172  return ConvertToIndex(isolate, input, error_index);
1173}
1174
1175bool Object::HasSpecificClassOf(String* name) {
1176  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1177}
1178
1179MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1180                                        Handle<Name> name) {
1181  LookupIterator it(object, name);
1182  if (!it.IsFound()) return it.factory()->undefined_value();
1183  return GetProperty(&it);
1184}
1185
1186MaybeHandle<Object> JSReceiver::GetProperty(Handle<JSReceiver> receiver,
1187                                            Handle<Name> name) {
1188  LookupIterator it(receiver, name, receiver);
1189  if (!it.IsFound()) return it.factory()->undefined_value();
1190  return Object::GetProperty(&it);
1191}
1192
1193MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
1194                                       uint32_t index) {
1195  LookupIterator it(isolate, object, index);
1196  if (!it.IsFound()) return it.factory()->undefined_value();
1197  return GetProperty(&it);
1198}
1199
1200MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate,
1201                                           Handle<JSReceiver> receiver,
1202                                           uint32_t index) {
1203  LookupIterator it(isolate, receiver, index, receiver);
1204  if (!it.IsFound()) return it.factory()->undefined_value();
1205  return Object::GetProperty(&it);
1206}
1207
1208Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
1209                                           Handle<Name> name) {
1210  LookupIterator it(object, name, object,
1211                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
1212  if (!it.IsFound()) return it.factory()->undefined_value();
1213  return GetDataProperty(&it);
1214}
1215
1216MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
1217                                       uint32_t index, Handle<Object> value,
1218                                       LanguageMode language_mode) {
1219  LookupIterator it(isolate, object, index);
1220  MAYBE_RETURN_NULL(
1221      SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
1222  return value;
1223}
1224
1225MaybeHandle<Object> JSReceiver::GetPrototype(Isolate* isolate,
1226                                             Handle<JSReceiver> receiver) {
1227  // We don't expect access checks to be needed on JSProxy objects.
1228  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
1229  PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
1230                         PrototypeIterator::END_AT_NON_HIDDEN);
1231  do {
1232    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
1233  } while (!iter.IsAtEnd());
1234  return PrototypeIterator::GetCurrent(iter);
1235}
1236
1237MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
1238                                            Handle<JSReceiver> receiver,
1239                                            const char* name) {
1240  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1241  return GetProperty(receiver, str);
1242}
1243
1244// static
1245MUST_USE_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys(
1246    Handle<JSReceiver> object) {
1247  return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
1248                                 ALL_PROPERTIES,
1249                                 GetKeysConversion::kConvertToString);
1250}
1251
1252bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject* object) {
1253  DisallowHeapAllocation no_gc;
1254  HeapObject* prototype = HeapObject::cast(object->map()->prototype());
1255  HeapObject* null = isolate->heap()->null_value();
1256  HeapObject* empty = isolate->heap()->empty_fixed_array();
1257  while (prototype != null) {
1258    Map* map = prototype->map();
1259    if (map->instance_type() <= LAST_CUSTOM_ELEMENTS_RECEIVER) return false;
1260    if (JSObject::cast(prototype)->elements() != empty) return false;
1261    prototype = HeapObject::cast(map->prototype());
1262  }
1263  return true;
1264}
1265
1266#define FIELD_ADDR(p, offset) \
1267  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1268
1269#define FIELD_ADDR_CONST(p, offset) \
1270  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
1271
1272#define READ_FIELD(p, offset) \
1273  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
1274
1275#define ACQUIRE_READ_FIELD(p, offset)           \
1276  reinterpret_cast<Object*>(base::Acquire_Load( \
1277      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1278
1279#define NOBARRIER_READ_FIELD(p, offset)           \
1280  reinterpret_cast<Object*>(base::NoBarrier_Load( \
1281      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1282
1283#define WRITE_FIELD(p, offset, value) \
1284  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1285
1286#define RELEASE_WRITE_FIELD(p, offset, value)                     \
1287  base::Release_Store(                                            \
1288      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1289      reinterpret_cast<base::AtomicWord>(value));
1290
1291#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
1292  base::NoBarrier_Store(                                          \
1293      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1294      reinterpret_cast<base::AtomicWord>(value));
1295
1296#define WRITE_BARRIER(heap, object, offset, value)          \
1297  heap->incremental_marking()->RecordWrite(                 \
1298      object, HeapObject::RawField(object, offset), value); \
1299  heap->RecordWrite(object, offset, value);
1300
1301#define FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(heap, array, start, length) \
1302  do {                                                                 \
1303    heap->RecordFixedArrayElements(array, start, length);              \
1304    heap->incremental_marking()->IterateBlackObject(array);            \
1305  } while (false)
1306
1307#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1308  if (mode != SKIP_WRITE_BARRIER) {                                  \
1309    if (mode == UPDATE_WRITE_BARRIER) {                              \
1310      heap->incremental_marking()->RecordWrite(                      \
1311          object, HeapObject::RawField(object, offset), value);      \
1312    }                                                                \
1313    heap->RecordWrite(object, offset, value);                        \
1314  }
1315
1316#define READ_DOUBLE_FIELD(p, offset) \
1317  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))
1318
1319#define WRITE_DOUBLE_FIELD(p, offset, value) \
1320  WriteDoubleValue(FIELD_ADDR(p, offset), value)
1321
1322#define READ_INT_FIELD(p, offset) \
1323  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1324
1325#define WRITE_INT_FIELD(p, offset, value) \
1326  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1327
1328#define READ_INTPTR_FIELD(p, offset) \
1329  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1330
1331#define WRITE_INTPTR_FIELD(p, offset, value) \
1332  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1333
1334#define READ_UINT8_FIELD(p, offset) \
1335  (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))
1336
1337#define WRITE_UINT8_FIELD(p, offset, value) \
1338  (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)
1339
1340#define READ_INT8_FIELD(p, offset) \
1341  (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))
1342
1343#define WRITE_INT8_FIELD(p, offset, value) \
1344  (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)
1345
1346#define READ_UINT16_FIELD(p, offset) \
1347  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1348
1349#define WRITE_UINT16_FIELD(p, offset, value) \
1350  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1351
1352#define READ_INT16_FIELD(p, offset) \
1353  (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))
1354
1355#define WRITE_INT16_FIELD(p, offset, value) \
1356  (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)
1357
1358#define READ_UINT32_FIELD(p, offset) \
1359  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1360
1361#define WRITE_UINT32_FIELD(p, offset, value) \
1362  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1363
1364#define READ_INT32_FIELD(p, offset) \
1365  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1366
1367#define WRITE_INT32_FIELD(p, offset, value) \
1368  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1369
1370#define READ_FLOAT_FIELD(p, offset) \
1371  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))
1372
1373#define WRITE_FLOAT_FIELD(p, offset, value) \
1374  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
1375
1376#define READ_UINT64_FIELD(p, offset) \
1377  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
1378
1379#define WRITE_UINT64_FIELD(p, offset, value) \
1380  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
1381
1382#define READ_INT64_FIELD(p, offset) \
1383  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1384
1385#define WRITE_INT64_FIELD(p, offset, value) \
1386  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1387
1388#define READ_BYTE_FIELD(p, offset) \
1389  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1390
1391#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1392  static_cast<byte>(base::NoBarrier_Load(    \
1393      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1394
1395#define WRITE_BYTE_FIELD(p, offset, value) \
1396  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1397
1398#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
1399  base::NoBarrier_Store(                                       \
1400      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1401      static_cast<base::Atomic8>(value));
1402
1403Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1404  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1405}
1406
1407
1408MapWord MapWord::FromMap(const Map* map) {
1409  return MapWord(reinterpret_cast<uintptr_t>(map));
1410}
1411
1412
1413Map* MapWord::ToMap() {
1414  return reinterpret_cast<Map*>(value_);
1415}
1416
1417bool MapWord::IsForwardingAddress() const {
1418  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1419}
1420
1421
1422MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1423  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1424  return MapWord(reinterpret_cast<uintptr_t>(raw));
1425}
1426
1427
1428HeapObject* MapWord::ToForwardingAddress() {
1429  DCHECK(IsForwardingAddress());
1430  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1431}
1432
1433
1434#ifdef VERIFY_HEAP
1435void HeapObject::VerifyObjectField(int offset) {
1436  VerifyPointer(READ_FIELD(this, offset));
1437}
1438
1439void HeapObject::VerifySmiField(int offset) {
1440  CHECK(READ_FIELD(this, offset)->IsSmi());
1441}
1442#endif
1443
1444
1445Heap* HeapObject::GetHeap() const {
1446  Heap* heap = MemoryChunk::FromAddress(
1447                   reinterpret_cast<Address>(const_cast<HeapObject*>(this)))
1448                   ->heap();
1449  SLOW_DCHECK(heap != NULL);
1450  return heap;
1451}
1452
1453
1454Isolate* HeapObject::GetIsolate() const {
1455  return GetHeap()->isolate();
1456}
1457
1458
1459Map* HeapObject::map() const {
1460#ifdef DEBUG
1461  // Clear mark potentially added by PathTracer.
1462  uintptr_t raw_value =
1463      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1464  return MapWord::FromRawValue(raw_value).ToMap();
1465#else
1466  return map_word().ToMap();
1467#endif
1468}
1469
1470
1471void HeapObject::set_map(Map* value) {
1472  set_map_word(MapWord::FromMap(value));
1473  if (value != nullptr) {
1474    // TODO(1600) We are passing NULL as a slot because maps can never be on
1475    // evacuation candidate.
1476    value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
1477#ifdef VERIFY_HEAP
1478    value->GetHeap()->VerifyObjectLayoutChange(this, value);
1479#endif
1480  }
1481}
1482
1483
1484Map* HeapObject::synchronized_map() {
1485  return synchronized_map_word().ToMap();
1486}
1487
1488
1489void HeapObject::synchronized_set_map(Map* value) {
1490  synchronized_set_map_word(MapWord::FromMap(value));
1491  if (value != nullptr) {
1492    // TODO(1600) We are passing NULL as a slot because maps can never be on
1493    // evacuation candidate.
1494    value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
1495#ifdef VERIFY_HEAP
1496    value->GetHeap()->VerifyObjectLayoutChange(this, value);
1497#endif
1498  }
1499}
1500
1501
1502void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1503  synchronized_set_map_word(MapWord::FromMap(value));
1504}
1505
1506
1507// Unsafe accessor omitting write barrier.
1508void HeapObject::set_map_no_write_barrier(Map* value) {
1509  set_map_word(MapWord::FromMap(value));
1510}
1511
1512
1513MapWord HeapObject::map_word() const {
1514  return MapWord(
1515      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1516}
1517
1518
1519void HeapObject::set_map_word(MapWord map_word) {
1520  NOBARRIER_WRITE_FIELD(
1521      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1522}
1523
1524
1525MapWord HeapObject::synchronized_map_word() const {
1526  return MapWord(
1527      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1528}
1529
1530
1531void HeapObject::synchronized_set_map_word(MapWord map_word) {
1532  RELEASE_WRITE_FIELD(
1533      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1534}
1535
1536
1537int HeapObject::Size() {
1538  return SizeFromMap(map());
1539}
1540
1541
1542double HeapNumber::value() const {
1543  return READ_DOUBLE_FIELD(this, kValueOffset);
1544}
1545
1546
1547void HeapNumber::set_value(double value) {
1548  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1549}
1550
1551uint64_t HeapNumber::value_as_bits() const {
1552  return READ_UINT64_FIELD(this, kValueOffset);
1553}
1554
1555void HeapNumber::set_value_as_bits(uint64_t bits) {
1556  WRITE_UINT64_FIELD(this, kValueOffset, bits);
1557}
1558
1559int HeapNumber::get_exponent() {
1560  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1561          kExponentShift) - kExponentBias;
1562}
1563
1564
1565int HeapNumber::get_sign() {
1566  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1567}
1568
1569ACCESSORS(JSReceiver, properties, FixedArray, kPropertiesOffset)
1570
1571
1572Object** FixedArray::GetFirstElementAddress() {
1573  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1574}
1575
1576
1577bool FixedArray::ContainsOnlySmisOrHoles() {
1578  Object* the_hole = GetHeap()->the_hole_value();
1579  Object** current = GetFirstElementAddress();
1580  for (int i = 0; i < length(); ++i) {
1581    Object* candidate = *current++;
1582    if (!candidate->IsSmi() && candidate != the_hole) return false;
1583  }
1584  return true;
1585}
1586
1587
1588FixedArrayBase* JSObject::elements() const {
1589  Object* array = READ_FIELD(this, kElementsOffset);
1590  return static_cast<FixedArrayBase*>(array);
1591}
1592
1593
1594void AllocationSite::Initialize() {
1595  set_transition_info(Smi::kZero);
1596  SetElementsKind(GetInitialFastElementsKind());
1597  set_nested_site(Smi::kZero);
1598  set_pretenure_data(0);
1599  set_pretenure_create_count(0);
1600  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1601                     SKIP_WRITE_BARRIER);
1602}
1603
1604
1605bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }
1606
1607
1608bool AllocationSite::IsMaybeTenure() {
1609  return pretenure_decision() == kMaybeTenure;
1610}
1611
1612
1613bool AllocationSite::PretenuringDecisionMade() {
1614  return pretenure_decision() != kUndecided;
1615}
1616
1617
1618void AllocationSite::MarkZombie() {
1619  DCHECK(!IsZombie());
1620  Initialize();
1621  set_pretenure_decision(kZombie);
1622}
1623
1624
1625ElementsKind AllocationSite::GetElementsKind() {
1626  DCHECK(!SitePointsToLiteral());
1627  int value = Smi::cast(transition_info())->value();
1628  return ElementsKindBits::decode(value);
1629}
1630
1631
1632void AllocationSite::SetElementsKind(ElementsKind kind) {
1633  int value = Smi::cast(transition_info())->value();
1634  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
1635                      SKIP_WRITE_BARRIER);
1636}
1637
1638
1639bool AllocationSite::CanInlineCall() {
1640  int value = Smi::cast(transition_info())->value();
1641  return DoNotInlineBit::decode(value) == 0;
1642}
1643
1644
1645void AllocationSite::SetDoNotInlineCall() {
1646  int value = Smi::cast(transition_info())->value();
1647  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
1648                      SKIP_WRITE_BARRIER);
1649}
1650
1651
1652bool AllocationSite::SitePointsToLiteral() {
1653  // If transition_info is a smi, then it represents an ElementsKind
1654  // for a constructed array. Otherwise, it must be a boilerplate
1655  // for an object or array literal.
1656  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
1657}
1658
1659
1660// Heuristic: We only need to create allocation site info if the boilerplate
1661// elements kind is the initial elements kind.
1662AllocationSiteMode AllocationSite::GetMode(
1663    ElementsKind boilerplate_elements_kind) {
1664  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
1665    return TRACK_ALLOCATION_SITE;
1666  }
1667
1668  return DONT_TRACK_ALLOCATION_SITE;
1669}
1670
1671inline bool AllocationSite::CanTrack(InstanceType type) {
1672  if (FLAG_turbo) {
1673    // TurboFan doesn't care at all about String pretenuring feedback,
1674    // so don't bother even trying to track that.
1675    return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
1676  }
1677  if (FLAG_allocation_site_pretenuring) {
1678    return type == JS_ARRAY_TYPE ||
1679        type == JS_OBJECT_TYPE ||
1680        type < FIRST_NONSTRING_TYPE;
1681  }
1682  return type == JS_ARRAY_TYPE;
1683}
1684
1685
1686AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
1687  int value = pretenure_data();
1688  return PretenureDecisionBits::decode(value);
1689}
1690
1691
1692void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
1693  int value = pretenure_data();
1694  set_pretenure_data(PretenureDecisionBits::update(value, decision));
1695}
1696
1697
1698bool AllocationSite::deopt_dependent_code() {
1699  int value = pretenure_data();
1700  return DeoptDependentCodeBit::decode(value);
1701}
1702
1703
1704void AllocationSite::set_deopt_dependent_code(bool deopt) {
1705  int value = pretenure_data();
1706  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
1707}
1708
1709
1710int AllocationSite::memento_found_count() {
1711  int value = pretenure_data();
1712  return MementoFoundCountBits::decode(value);
1713}
1714
1715
1716inline void AllocationSite::set_memento_found_count(int count) {
1717  int value = pretenure_data();
1718  // Verify that we can count more mementos than we can possibly find in one
1719  // new space collection.
1720  DCHECK((GetHeap()->MaxSemiSpaceSize() /
1721          (Heap::kMinObjectSizeInWords * kPointerSize +
1722           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1723  DCHECK(count < MementoFoundCountBits::kMax);
1724  set_pretenure_data(MementoFoundCountBits::update(value, count));
1725}
1726
1727
1728int AllocationSite::memento_create_count() { return pretenure_create_count(); }
1729
1730
1731void AllocationSite::set_memento_create_count(int count) {
1732  set_pretenure_create_count(count);
1733}
1734
1735
1736bool AllocationSite::IncrementMementoFoundCount(int increment) {
1737  if (IsZombie()) return false;
1738
1739  int value = memento_found_count();
1740  set_memento_found_count(value + increment);
1741  return memento_found_count() >= kPretenureMinimumCreated;
1742}
1743
1744
1745inline void AllocationSite::IncrementMementoCreateCount() {
1746  DCHECK(FLAG_allocation_site_pretenuring);
1747  int value = memento_create_count();
1748  set_memento_create_count(value + 1);
1749}
1750
1751
1752inline bool AllocationSite::MakePretenureDecision(
1753    PretenureDecision current_decision,
1754    double ratio,
1755    bool maximum_size_scavenge) {
1756  // Here we just allow state transitions from undecided or maybe tenure
1757  // to don't tenure, maybe tenure, or tenure.
1758  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1759    if (ratio >= kPretenureRatio) {
1760      // We just transition into tenure state when the semi-space was at
1761      // maximum capacity.
1762      if (maximum_size_scavenge) {
1763        set_deopt_dependent_code(true);
1764        set_pretenure_decision(kTenure);
1765        // Currently we just need to deopt when we make a state transition to
1766        // tenure.
1767        return true;
1768      }
1769      set_pretenure_decision(kMaybeTenure);
1770    } else {
1771      set_pretenure_decision(kDontTenure);
1772    }
1773  }
1774  return false;
1775}
1776
1777
1778inline bool AllocationSite::DigestPretenuringFeedback(
1779    bool maximum_size_scavenge) {
1780  bool deopt = false;
1781  int create_count = memento_create_count();
1782  int found_count = memento_found_count();
1783  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1784  double ratio =
1785      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1786          static_cast<double>(found_count) / create_count : 0.0;
1787  PretenureDecision current_decision = pretenure_decision();
1788
1789  if (minimum_mementos_created) {
1790    deopt = MakePretenureDecision(
1791        current_decision, ratio, maximum_size_scavenge);
1792  }
1793
1794  if (FLAG_trace_pretenuring_statistics) {
1795    PrintIsolate(GetIsolate(),
1796                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
1797                 "(%d, %d, %f) %s => %s\n",
1798                 static_cast<void*>(this), create_count, found_count, ratio,
1799                 PretenureDecisionName(current_decision),
1800                 PretenureDecisionName(pretenure_decision()));
1801  }
1802
1803  // Clear feedback calculation fields until the next gc.
1804  set_memento_found_count(0);
1805  set_memento_create_count(0);
1806  return deopt;
1807}
1808
1809
1810bool AllocationMemento::IsValid() {
1811  return allocation_site()->IsAllocationSite() &&
1812         !AllocationSite::cast(allocation_site())->IsZombie();
1813}
1814
1815
1816AllocationSite* AllocationMemento::GetAllocationSite() {
1817  DCHECK(IsValid());
1818  return AllocationSite::cast(allocation_site());
1819}
1820
1821Address AllocationMemento::GetAllocationSiteUnchecked() {
1822  return reinterpret_cast<Address>(allocation_site());
1823}
1824
1825void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1826  JSObject::ValidateElements(object);
1827  ElementsKind elements_kind = object->map()->elements_kind();
1828  if (!IsFastObjectElementsKind(elements_kind)) {
1829    if (IsFastHoleyElementsKind(elements_kind)) {
1830      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1831    } else {
1832      TransitionElementsKind(object, FAST_ELEMENTS);
1833    }
1834  }
1835}
1836
1837
1838void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1839                                        Object** objects,
1840                                        uint32_t count,
1841                                        EnsureElementsMode mode) {
1842  ElementsKind current_kind = object->GetElementsKind();
1843  ElementsKind target_kind = current_kind;
1844  {
1845    DisallowHeapAllocation no_allocation;
1846    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1847    bool is_holey = IsFastHoleyElementsKind(current_kind);
1848    if (current_kind == FAST_HOLEY_ELEMENTS) return;
1849    Object* the_hole = object->GetHeap()->the_hole_value();
1850    for (uint32_t i = 0; i < count; ++i) {
1851      Object* current = *objects++;
1852      if (current == the_hole) {
1853        is_holey = true;
1854        target_kind = GetHoleyElementsKind(target_kind);
1855      } else if (!current->IsSmi()) {
1856        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1857          if (IsFastSmiElementsKind(target_kind)) {
1858            if (is_holey) {
1859              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1860            } else {
1861              target_kind = FAST_DOUBLE_ELEMENTS;
1862            }
1863          }
1864        } else if (is_holey) {
1865          target_kind = FAST_HOLEY_ELEMENTS;
1866          break;
1867        } else {
1868          target_kind = FAST_ELEMENTS;
1869        }
1870      }
1871    }
1872  }
1873  if (target_kind != current_kind) {
1874    TransitionElementsKind(object, target_kind);
1875  }
1876}
1877
1878
1879void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1880                                        Handle<FixedArrayBase> elements,
1881                                        uint32_t length,
1882                                        EnsureElementsMode mode) {
1883  Heap* heap = object->GetHeap();
1884  if (elements->map() != heap->fixed_double_array_map()) {
1885    DCHECK(elements->map() == heap->fixed_array_map() ||
1886           elements->map() == heap->fixed_cow_array_map());
1887    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1888      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1889    }
1890    Object** objects =
1891        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1892    EnsureCanContainElements(object, objects, length, mode);
1893    return;
1894  }
1895
1896  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1897  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1898    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1899  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1900    Handle<FixedDoubleArray> double_array =
1901        Handle<FixedDoubleArray>::cast(elements);
1902    for (uint32_t i = 0; i < length; ++i) {
1903      if (double_array->is_the_hole(i)) {
1904        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1905        return;
1906      }
1907    }
1908    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1909  }
1910}
1911
1912
1913void JSObject::SetMapAndElements(Handle<JSObject> object,
1914                                 Handle<Map> new_map,
1915                                 Handle<FixedArrayBase> value) {
1916  JSObject::MigrateToMap(object, new_map);
1917  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1918          (*value == object->GetHeap()->empty_fixed_array()) ||
1919          object->map()->has_fast_string_wrapper_elements()) ==
1920         (value->map() == object->GetHeap()->fixed_array_map() ||
1921          value->map() == object->GetHeap()->fixed_cow_array_map()));
1922  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1923         (object->map()->has_fast_double_elements() ==
1924          value->IsFixedDoubleArray()));
1925  object->set_elements(*value);
1926}
1927
1928
1929void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1930  WRITE_FIELD(this, kElementsOffset, value);
1931  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1932}
1933
1934
1935void JSObject::initialize_elements() {
1936  FixedArrayBase* elements = map()->GetInitialElements();
1937  WRITE_FIELD(this, kElementsOffset, elements);
1938}
1939
1940
1941InterceptorInfo* JSObject::GetIndexedInterceptor() {
1942  return map()->GetIndexedInterceptor();
1943}
1944
1945InterceptorInfo* JSObject::GetNamedInterceptor() {
1946  return map()->GetNamedInterceptor();
1947}
1948
1949InterceptorInfo* Map::GetNamedInterceptor() {
1950  DCHECK(has_named_interceptor());
1951  JSFunction* constructor = JSFunction::cast(GetConstructor());
1952  DCHECK(constructor->shared()->IsApiFunction());
1953  return InterceptorInfo::cast(
1954      constructor->shared()->get_api_func_data()->named_property_handler());
1955}
1956
1957InterceptorInfo* Map::GetIndexedInterceptor() {
1958  DCHECK(has_indexed_interceptor());
1959  JSFunction* constructor = JSFunction::cast(GetConstructor());
1960  DCHECK(constructor->shared()->IsApiFunction());
1961  return InterceptorInfo::cast(
1962      constructor->shared()->get_api_func_data()->indexed_property_handler());
1963}
1964
1965double Oddball::to_number_raw() const {
1966  return READ_DOUBLE_FIELD(this, kToNumberRawOffset);
1967}
1968
1969void Oddball::set_to_number_raw(double value) {
1970  WRITE_DOUBLE_FIELD(this, kToNumberRawOffset, value);
1971}
1972
1973ACCESSORS(Oddball, to_string, String, kToStringOffset)
1974ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1975ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
1976
1977
1978byte Oddball::kind() const {
1979  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1980}
1981
1982
1983void Oddball::set_kind(byte value) {
1984  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1985}
1986
1987
1988// static
1989Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
1990  return handle(input->to_number(), input->GetIsolate());
1991}
1992
1993
1994ACCESSORS(Cell, value, Object, kValueOffset)
1995ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1996ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
1997ACCESSORS(PropertyCell, value, Object, kValueOffset)
1998
1999
2000PropertyDetails PropertyCell::property_details() {
2001  return PropertyDetails(Smi::cast(property_details_raw()));
2002}
2003
2004
2005void PropertyCell::set_property_details(PropertyDetails details) {
2006  set_property_details_raw(details.AsSmi());
2007}
2008
2009
2010Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
2011
2012
2013void WeakCell::clear() {
2014  // Either the garbage collector is clearing the cell or we are simply
2015  // initializing the root empty weak cell.
2016  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
2017         this == GetHeap()->empty_weak_cell());
2018  WRITE_FIELD(this, kValueOffset, Smi::kZero);
2019}
2020
2021
2022void WeakCell::initialize(HeapObject* val) {
2023  WRITE_FIELD(this, kValueOffset, val);
2024  // We just have to execute the generational barrier here because we never
2025  // mark through a weak cell and collect evacuation candidates when we process
2026  // all weak cells.
2027  WriteBarrierMode mode = ObjectMarking::IsBlack(this)
2028                              ? UPDATE_WRITE_BARRIER
2029                              : UPDATE_WEAK_WRITE_BARRIER;
2030  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
2031}
2032
2033bool WeakCell::cleared() const { return value() == Smi::kZero; }
2034
2035Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
2036
2037
2038void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
2039  WRITE_FIELD(this, kNextOffset, val);
2040  if (mode == UPDATE_WRITE_BARRIER) {
2041    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
2042  }
2043}
2044
2045
2046void WeakCell::clear_next(Object* the_hole_value) {
2047  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
2048  set_next(the_hole_value, SKIP_WRITE_BARRIER);
2049}
2050
2051bool WeakCell::next_cleared() { return next()->IsTheHole(GetIsolate()); }
2052
2053int JSObject::GetHeaderSize() { return GetHeaderSize(map()->instance_type()); }
2054
2055
2056int JSObject::GetHeaderSize(InstanceType type) {
2057  // Check for the most common kind of JavaScript object before
2058  // falling into the generic switch. This speeds up the internal
2059  // field operations considerably on average.
2060  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2061  switch (type) {
2062    case JS_API_OBJECT_TYPE:
2063    case JS_SPECIAL_API_OBJECT_TYPE:
2064      return JSObject::kHeaderSize;
2065    case JS_GENERATOR_OBJECT_TYPE:
2066      return JSGeneratorObject::kSize;
2067    case JS_GLOBAL_PROXY_TYPE:
2068      return JSGlobalProxy::kSize;
2069    case JS_GLOBAL_OBJECT_TYPE:
2070      return JSGlobalObject::kSize;
2071    case JS_BOUND_FUNCTION_TYPE:
2072      return JSBoundFunction::kSize;
2073    case JS_FUNCTION_TYPE:
2074      return JSFunction::kSize;
2075    case JS_VALUE_TYPE:
2076      return JSValue::kSize;
2077    case JS_DATE_TYPE:
2078      return JSDate::kSize;
2079    case JS_ARRAY_TYPE:
2080      return JSArray::kSize;
2081    case JS_ARRAY_BUFFER_TYPE:
2082      return JSArrayBuffer::kSize;
2083    case JS_TYPED_ARRAY_TYPE:
2084      return JSTypedArray::kSize;
2085    case JS_DATA_VIEW_TYPE:
2086      return JSDataView::kSize;
2087    case JS_SET_TYPE:
2088      return JSSet::kSize;
2089    case JS_MAP_TYPE:
2090      return JSMap::kSize;
2091    case JS_SET_ITERATOR_TYPE:
2092      return JSSetIterator::kSize;
2093    case JS_MAP_ITERATOR_TYPE:
2094      return JSMapIterator::kSize;
2095    case JS_WEAK_MAP_TYPE:
2096      return JSWeakMap::kSize;
2097    case JS_WEAK_SET_TYPE:
2098      return JSWeakSet::kSize;
2099    case JS_PROMISE_CAPABILITY_TYPE:
2100      return JSPromiseCapability::kSize;
2101    case JS_PROMISE_TYPE:
2102      return JSPromise::kSize;
2103    case JS_REGEXP_TYPE:
2104      return JSRegExp::kSize;
2105    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2106      return JSObject::kHeaderSize;
2107    case JS_MESSAGE_OBJECT_TYPE:
2108      return JSMessageObject::kSize;
2109    case JS_ARGUMENTS_TYPE:
2110      return JSArgumentsObject::kHeaderSize;
2111    case JS_ERROR_TYPE:
2112      return JSObject::kHeaderSize;
2113    case JS_STRING_ITERATOR_TYPE:
2114      return JSStringIterator::kSize;
2115    case JS_MODULE_NAMESPACE_TYPE:
2116      return JSModuleNamespace::kHeaderSize;
2117    default:
2118      if (type >= FIRST_ARRAY_ITERATOR_TYPE &&
2119          type <= LAST_ARRAY_ITERATOR_TYPE) {
2120        return JSArrayIterator::kSize;
2121      }
2122      UNREACHABLE();
2123      return 0;
2124  }
2125}
2126
2127inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
2128  return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
2129}
2130
2131int JSObject::GetInternalFieldCount(Map* map) {
2132  int instance_size = map->instance_size();
2133  if (instance_size == kVariableSizeSentinel) return 0;
2134  InstanceType instance_type = map->instance_type();
2135  return ((instance_size - GetHeaderSize(instance_type)) >> kPointerSizeLog2) -
2136         map->GetInObjectProperties();
2137}
2138
2139
2140int JSObject::GetInternalFieldCount() { return GetInternalFieldCount(map()); }
2141
2142
2143int JSObject::GetInternalFieldOffset(int index) {
2144  DCHECK(index < GetInternalFieldCount() && index >= 0);
2145  return GetHeaderSize() + (kPointerSize * index);
2146}
2147
2148
2149Object* JSObject::GetInternalField(int index) {
2150  DCHECK(index < GetInternalFieldCount() && index >= 0);
2151  // Internal objects do follow immediately after the header, whereas in-object
2152  // properties are at the end of the object. Therefore there is no need
2153  // to adjust the index here.
2154  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2155}
2156
2157
2158void JSObject::SetInternalField(int index, Object* value) {
2159  DCHECK(index < GetInternalFieldCount() && index >= 0);
2160  // Internal objects do follow immediately after the header, whereas in-object
2161  // properties are at the end of the object. Therefore there is no need
2162  // to adjust the index here.
2163  int offset = GetHeaderSize() + (kPointerSize * index);
2164  WRITE_FIELD(this, offset, value);
2165  WRITE_BARRIER(GetHeap(), this, offset, value);
2166}
2167
2168
2169void JSObject::SetInternalField(int index, Smi* value) {
2170  DCHECK(index < GetInternalFieldCount() && index >= 0);
2171  // Internal objects do follow immediately after the header, whereas in-object
2172  // properties are at the end of the object. Therefore there is no need
2173  // to adjust the index here.
2174  int offset = GetHeaderSize() + (kPointerSize * index);
2175  WRITE_FIELD(this, offset, value);
2176}
2177
2178
2179bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2180  if (!FLAG_unbox_double_fields) return false;
2181  return map()->IsUnboxedDoubleField(index);
2182}
2183
2184
2185bool Map::IsUnboxedDoubleField(FieldIndex index) {
2186  if (!FLAG_unbox_double_fields) return false;
2187  if (index.is_hidden_field() || !index.is_inobject()) return false;
2188  return !layout_descriptor()->IsTagged(index.property_index());
2189}
2190
2191
2192// Access fast-case object properties at index. The use of these routines
2193// is needed to correctly distinguish between properties stored in-object and
2194// properties stored in the properties array.
2195Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2196  DCHECK(!IsUnboxedDoubleField(index));
2197  if (index.is_inobject()) {
2198    return READ_FIELD(this, index.offset());
2199  } else {
2200    return properties()->get(index.outobject_array_index());
2201  }
2202}
2203
2204
2205double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2206  DCHECK(IsUnboxedDoubleField(index));
2207  return READ_DOUBLE_FIELD(this, index.offset());
2208}
2209
2210uint64_t JSObject::RawFastDoublePropertyAsBitsAt(FieldIndex index) {
2211  DCHECK(IsUnboxedDoubleField(index));
2212  return READ_UINT64_FIELD(this, index.offset());
2213}
2214
2215void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2216  if (index.is_inobject()) {
2217    int offset = index.offset();
2218    WRITE_FIELD(this, offset, value);
2219    WRITE_BARRIER(GetHeap(), this, offset, value);
2220  } else {
2221    properties()->set(index.outobject_array_index(), value);
2222  }
2223}
2224
2225void JSObject::RawFastDoublePropertyAsBitsAtPut(FieldIndex index,
2226                                                uint64_t bits) {
2227  WRITE_UINT64_FIELD(this, index.offset(), bits);
2228}
2229
2230void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2231  if (IsUnboxedDoubleField(index)) {
2232    DCHECK(value->IsMutableHeapNumber());
2233    // Ensure that all bits of the double value are preserved.
2234    RawFastDoublePropertyAsBitsAtPut(index,
2235                                     HeapNumber::cast(value)->value_as_bits());
2236  } else {
2237    RawFastPropertyAtPut(index, value);
2238  }
2239}
2240
2241void JSObject::WriteToField(int descriptor, PropertyDetails details,
2242                            Object* value) {
2243  DCHECK_EQ(kField, details.location());
2244  DCHECK_EQ(kData, details.kind());
2245  DisallowHeapAllocation no_gc;
2246  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
2247  if (details.representation().IsDouble()) {
2248    // Nothing more to be done.
2249    if (value->IsUninitialized(this->GetIsolate())) {
2250      return;
2251    }
2252    // Manipulating the signaling NaN used for the hole and uninitialized
2253    // double field sentinel in C++, e.g. with bit_cast or value()/set_value(),
2254    // will change its value on ia32 (the x87 stack is used to return values
2255    // and stores to the stack silently clear the signalling bit).
2256    uint64_t bits;
2257    if (value->IsSmi()) {
2258      bits = bit_cast<uint64_t>(static_cast<double>(Smi::cast(value)->value()));
2259    } else {
2260      DCHECK(value->IsHeapNumber());
2261      bits = HeapNumber::cast(value)->value_as_bits();
2262    }
2263    if (IsUnboxedDoubleField(index)) {
2264      RawFastDoublePropertyAsBitsAtPut(index, bits);
2265    } else {
2266      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
2267      DCHECK(box->IsMutableHeapNumber());
2268      box->set_value_as_bits(bits);
2269    }
2270  } else {
2271    RawFastPropertyAtPut(index, value);
2272  }
2273}
2274
2275int JSObject::GetInObjectPropertyOffset(int index) {
2276  return map()->GetInObjectPropertyOffset(index);
2277}
2278
2279
2280Object* JSObject::InObjectPropertyAt(int index) {
2281  int offset = GetInObjectPropertyOffset(index);
2282  return READ_FIELD(this, offset);
2283}
2284
2285
2286Object* JSObject::InObjectPropertyAtPut(int index,
2287                                        Object* value,
2288                                        WriteBarrierMode mode) {
2289  // Adjust for the number of properties stored in the object.
2290  int offset = GetInObjectPropertyOffset(index);
2291  WRITE_FIELD(this, offset, value);
2292  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2293  return value;
2294}
2295
2296
2297void JSObject::InitializeBody(Map* map, int start_offset,
2298                              Object* pre_allocated_value,
2299                              Object* filler_value) {
2300  DCHECK(!filler_value->IsHeapObject() ||
2301         !GetHeap()->InNewSpace(filler_value));
2302  DCHECK(!pre_allocated_value->IsHeapObject() ||
2303         !GetHeap()->InNewSpace(pre_allocated_value));
2304  int size = map->instance_size();
2305  int offset = start_offset;
2306  if (filler_value != pre_allocated_value) {
2307    int end_of_pre_allocated_offset =
2308        size - (map->unused_property_fields() * kPointerSize);
2309    DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
2310    while (offset < end_of_pre_allocated_offset) {
2311      WRITE_FIELD(this, offset, pre_allocated_value);
2312      offset += kPointerSize;
2313    }
2314  }
2315  while (offset < size) {
2316    WRITE_FIELD(this, offset, filler_value);
2317    offset += kPointerSize;
2318  }
2319}
2320
2321
2322bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2323  if (unused_property_fields() != 0) return false;
2324  if (is_prototype_map()) return false;
2325  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2326  int limit = Max(minimum, GetInObjectProperties());
2327  int external = NumberOfFields() - GetInObjectProperties();
2328  return external > limit;
2329}
2330
2331
2332void Struct::InitializeBody(int object_size) {
2333  Object* value = GetHeap()->undefined_value();
2334  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2335    WRITE_FIELD(this, offset, value);
2336  }
2337}
2338
2339bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }
2340
2341
2342bool Object::ToArrayIndex(uint32_t* index) {
2343  return Object::ToUint32(index) && *index != kMaxUInt32;
2344}
2345
2346
2347void Object::VerifyApiCallResultType() {
2348#if DEBUG
2349  if (IsSmi()) return;
2350  DCHECK(IsHeapObject());
2351  Isolate* isolate = HeapObject::cast(this)->GetIsolate();
2352  if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() ||
2353        IsUndefined(isolate) || IsTrue(isolate) || IsFalse(isolate) ||
2354        IsNull(isolate))) {
2355    FATAL("API call returned invalid object");
2356  }
2357#endif  // DEBUG
2358}
2359
2360
2361Object* FixedArray::get(int index) const {
2362  SLOW_DCHECK(index >= 0 && index < this->length());
2363  return NOBARRIER_READ_FIELD(this, kHeaderSize + index * kPointerSize);
2364}
2365
2366Handle<Object> FixedArray::get(FixedArray* array, int index, Isolate* isolate) {
2367  return handle(array->get(index), isolate);
2368}
2369
2370template <class T>
2371MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
2372  Object* obj = get(index);
2373  if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
2374  return Handle<T>(T::cast(obj), isolate);
2375}
2376
2377template <class T>
2378Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
2379  Object* obj = get(index);
2380  CHECK(!obj->IsUndefined(isolate));
2381  return Handle<T>(T::cast(obj), isolate);
2382}
2383bool FixedArray::is_the_hole(Isolate* isolate, int index) {
2384  return get(index)->IsTheHole(isolate);
2385}
2386
2387void FixedArray::set(int index, Smi* value) {
2388  DCHECK(map() != GetHeap()->fixed_cow_array_map());
2389  DCHECK(index >= 0 && index < this->length());
2390  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2391  int offset = kHeaderSize + index * kPointerSize;
2392  NOBARRIER_WRITE_FIELD(this, offset, value);
2393}
2394
2395
2396void FixedArray::set(int index, Object* value) {
2397  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2398  DCHECK(IsFixedArray());
2399  DCHECK_GE(index, 0);
2400  DCHECK_LT(index, this->length());
2401  int offset = kHeaderSize + index * kPointerSize;
2402  NOBARRIER_WRITE_FIELD(this, offset, value);
2403  WRITE_BARRIER(GetHeap(), this, offset, value);
2404}
2405
2406
2407double FixedDoubleArray::get_scalar(int index) {
2408  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2409         map() != GetHeap()->fixed_array_map());
2410  DCHECK(index >= 0 && index < this->length());
2411  DCHECK(!is_the_hole(index));
2412  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2413}
2414
2415
2416uint64_t FixedDoubleArray::get_representation(int index) {
2417  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2418         map() != GetHeap()->fixed_array_map());
2419  DCHECK(index >= 0 && index < this->length());
2420  int offset = kHeaderSize + index * kDoubleSize;
2421  return READ_UINT64_FIELD(this, offset);
2422}
2423
2424Handle<Object> FixedDoubleArray::get(FixedDoubleArray* array, int index,
2425                                     Isolate* isolate) {
2426  if (array->is_the_hole(index)) {
2427    return isolate->factory()->the_hole_value();
2428  } else {
2429    return isolate->factory()->NewNumber(array->get_scalar(index));
2430  }
2431}
2432
2433
2434void FixedDoubleArray::set(int index, double value) {
2435  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2436         map() != GetHeap()->fixed_array_map());
2437  int offset = kHeaderSize + index * kDoubleSize;
2438  if (std::isnan(value)) {
2439    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
2440  } else {
2441    WRITE_DOUBLE_FIELD(this, offset, value);
2442  }
2443  DCHECK(!is_the_hole(index));
2444}
2445
2446void FixedDoubleArray::set_the_hole(Isolate* isolate, int index) {
2447  set_the_hole(index);
2448}
2449
2450void FixedDoubleArray::set_the_hole(int index) {
2451  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2452         map() != GetHeap()->fixed_array_map());
2453  int offset = kHeaderSize + index * kDoubleSize;
2454  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
2455}
2456
2457bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
2458  return is_the_hole(index);
2459}
2460
2461bool FixedDoubleArray::is_the_hole(int index) {
2462  return get_representation(index) == kHoleNanInt64;
2463}
2464
2465
2466double* FixedDoubleArray::data_start() {
2467  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2468}
2469
2470
2471void FixedDoubleArray::FillWithHoles(int from, int to) {
2472  for (int i = from; i < to; i++) {
2473    set_the_hole(i);
2474  }
2475}
2476
2477
2478Object* WeakFixedArray::Get(int index) const {
2479  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
2480  if (raw->IsSmi()) return raw;
2481  DCHECK(raw->IsWeakCell());
2482  return WeakCell::cast(raw)->value();
2483}
2484
2485
2486bool WeakFixedArray::IsEmptySlot(int index) const {
2487  DCHECK(index < Length());
2488  return Get(index)->IsSmi();
2489}
2490
2491
2492void WeakFixedArray::Clear(int index) {
2493  FixedArray::cast(this)->set(index + kFirstIndex, Smi::kZero);
2494}
2495
2496
2497int WeakFixedArray::Length() const {
2498  return FixedArray::cast(this)->length() - kFirstIndex;
2499}
2500
2501
2502int WeakFixedArray::last_used_index() const {
2503  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
2504}
2505
2506
2507void WeakFixedArray::set_last_used_index(int index) {
2508  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
2509}
2510
2511
2512template <class T>
2513T* WeakFixedArray::Iterator::Next() {
2514  if (list_ != NULL) {
2515    // Assert that list did not change during iteration.
2516    DCHECK_EQ(last_used_index_, list_->last_used_index());
2517    while (index_ < list_->Length()) {
2518      Object* item = list_->Get(index_++);
2519      if (item != Empty()) return T::cast(item);
2520    }
2521    list_ = NULL;
2522  }
2523  return NULL;
2524}
2525
2526
2527int ArrayList::Length() {
2528  if (FixedArray::cast(this)->length() == 0) return 0;
2529  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
2530}
2531
2532
2533void ArrayList::SetLength(int length) {
2534  return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2535}
2536
2537
2538Object* ArrayList::Get(int index) {
2539  return FixedArray::cast(this)->get(kFirstIndex + index);
2540}
2541
2542
2543Object** ArrayList::Slot(int index) {
2544  return data_start() + kFirstIndex + index;
2545}
2546
2547void ArrayList::Set(int index, Object* obj, WriteBarrierMode mode) {
2548  FixedArray::cast(this)->set(kFirstIndex + index, obj, mode);
2549}
2550
2551
2552void ArrayList::Clear(int index, Object* undefined) {
2553  DCHECK(undefined->IsUndefined(GetIsolate()));
2554  FixedArray::cast(this)
2555      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
2556}
2557
2558int RegExpMatchInfo::NumberOfCaptureRegisters() {
2559  DCHECK_GE(length(), kLastMatchOverhead);
2560  Object* obj = get(kNumberOfCapturesIndex);
2561  return Smi::cast(obj)->value();
2562}
2563
2564void RegExpMatchInfo::SetNumberOfCaptureRegisters(int value) {
2565  DCHECK_GE(length(), kLastMatchOverhead);
2566  set(kNumberOfCapturesIndex, Smi::FromInt(value));
2567}
2568
2569String* RegExpMatchInfo::LastSubject() {
2570  DCHECK_GE(length(), kLastMatchOverhead);
2571  Object* obj = get(kLastSubjectIndex);
2572  return String::cast(obj);
2573}
2574
2575void RegExpMatchInfo::SetLastSubject(String* value) {
2576  DCHECK_GE(length(), kLastMatchOverhead);
2577  set(kLastSubjectIndex, value);
2578}
2579
2580Object* RegExpMatchInfo::LastInput() {
2581  DCHECK_GE(length(), kLastMatchOverhead);
2582  return get(kLastInputIndex);
2583}
2584
2585void RegExpMatchInfo::SetLastInput(Object* value) {
2586  DCHECK_GE(length(), kLastMatchOverhead);
2587  set(kLastInputIndex, value);
2588}
2589
2590int RegExpMatchInfo::Capture(int i) {
2591  DCHECK_LT(i, NumberOfCaptureRegisters());
2592  Object* obj = get(kFirstCaptureIndex + i);
2593  return Smi::cast(obj)->value();
2594}
2595
2596void RegExpMatchInfo::SetCapture(int i, int value) {
2597  DCHECK_LT(i, NumberOfCaptureRegisters());
2598  set(kFirstCaptureIndex + i, Smi::FromInt(value));
2599}
2600
2601WriteBarrierMode HeapObject::GetWriteBarrierMode(
2602    const DisallowHeapAllocation& promise) {
2603  Heap* heap = GetHeap();
2604  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2605  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2606  return UPDATE_WRITE_BARRIER;
2607}
2608
2609
2610AllocationAlignment HeapObject::RequiredAlignment() {
2611#ifdef V8_HOST_ARCH_32_BIT
2612  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
2613      FixedArrayBase::cast(this)->length() != 0) {
2614    return kDoubleAligned;
2615  }
2616  if (IsHeapNumber()) return kDoubleUnaligned;
2617#endif  // V8_HOST_ARCH_32_BIT
2618  return kWordAligned;
2619}
2620
2621
2622void FixedArray::set(int index,
2623                     Object* value,
2624                     WriteBarrierMode mode) {
2625  DCHECK_NE(map(), GetHeap()->fixed_cow_array_map());
2626  DCHECK_GE(index, 0);
2627  DCHECK_LT(index, this->length());
2628  int offset = kHeaderSize + index * kPointerSize;
2629  NOBARRIER_WRITE_FIELD(this, offset, value);
2630  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2631}
2632
2633
2634void FixedArray::NoWriteBarrierSet(FixedArray* array,
2635                                   int index,
2636                                   Object* value) {
2637  DCHECK_NE(array->map(), array->GetHeap()->fixed_cow_array_map());
2638  DCHECK_GE(index, 0);
2639  DCHECK_LT(index, array->length());
2640  DCHECK(!array->GetHeap()->InNewSpace(value));
2641  NOBARRIER_WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2642}
2643
2644void FixedArray::set_undefined(int index) {
2645  set_undefined(GetIsolate(), index);
2646}
2647
2648void FixedArray::set_undefined(Isolate* isolate, int index) {
2649  FixedArray::NoWriteBarrierSet(this, index,
2650                                isolate->heap()->undefined_value());
2651}
2652
2653void FixedArray::set_null(int index) { set_null(GetIsolate(), index); }
2654
2655void FixedArray::set_null(Isolate* isolate, int index) {
2656  FixedArray::NoWriteBarrierSet(this, index, isolate->heap()->null_value());
2657}
2658
2659void FixedArray::set_the_hole(int index) { set_the_hole(GetIsolate(), index); }
2660
2661void FixedArray::set_the_hole(Isolate* isolate, int index) {
2662  FixedArray::NoWriteBarrierSet(this, index, isolate->heap()->the_hole_value());
2663}
2664
2665void FixedArray::FillWithHoles(int from, int to) {
2666  Isolate* isolate = GetIsolate();
2667  for (int i = from; i < to; i++) {
2668    set_the_hole(isolate, i);
2669  }
2670}
2671
2672
2673Object** FixedArray::data_start() {
2674  return HeapObject::RawField(this, kHeaderSize);
2675}
2676
2677
2678Object** FixedArray::RawFieldOfElementAt(int index) {
2679  return HeapObject::RawField(this, OffsetOfElementAt(index));
2680}
2681
2682#define DEFINE_FRAME_ARRAY_ACCESSORS(name, type)                              \
2683  type* FrameArray::name(int frame_ix) const {                                \
2684    Object* obj =                                                             \
2685        get(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset);    \
2686    return type::cast(obj);                                                   \
2687  }                                                                           \
2688                                                                              \
2689  void FrameArray::Set##name(int frame_ix, type* value) {                     \
2690    set(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset, value); \
2691  }
2692FRAME_ARRAY_FIELD_LIST(DEFINE_FRAME_ARRAY_ACCESSORS)
2693#undef DEFINE_FRAME_ARRAY_ACCESSORS
2694
2695bool FrameArray::IsWasmFrame(int frame_ix) const {
2696  const int flags = Flags(frame_ix)->value();
2697  return (flags & kIsWasmFrame) != 0;
2698}
2699
2700bool FrameArray::IsAsmJsWasmFrame(int frame_ix) const {
2701  const int flags = Flags(frame_ix)->value();
2702  return (flags & kIsAsmJsWasmFrame) != 0;
2703}
2704
2705int FrameArray::FrameCount() const {
2706  const int frame_count = Smi::cast(get(kFrameCountIndex))->value();
2707  DCHECK_LE(0, frame_count);
2708  return frame_count;
2709}
2710
2711bool DescriptorArray::IsEmpty() {
2712  DCHECK(length() >= kFirstIndex ||
2713         this == GetHeap()->empty_descriptor_array());
2714  return length() < kFirstIndex;
2715}
2716
2717
2718int DescriptorArray::number_of_descriptors() {
2719  DCHECK(length() >= kFirstIndex || IsEmpty());
2720  int len = length();
2721  return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
2722}
2723
2724
2725int DescriptorArray::number_of_descriptors_storage() {
2726  int len = length();
2727  return len == 0 ? 0 : (len - kFirstIndex) / kEntrySize;
2728}
2729
2730
2731int DescriptorArray::NumberOfSlackDescriptors() {
2732  return number_of_descriptors_storage() - number_of_descriptors();
2733}
2734
2735
2736void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2737  WRITE_FIELD(
2738      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2739}
2740
2741
2742inline int DescriptorArray::number_of_entries() {
2743  return number_of_descriptors();
2744}
2745
2746
2747bool DescriptorArray::HasEnumCache() {
2748  return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
2749}
2750
2751
2752void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
2753  set(kEnumCacheIndex, array->get(kEnumCacheIndex));
2754}
2755
2756
2757FixedArray* DescriptorArray::GetEnumCache() {
2758  DCHECK(HasEnumCache());
2759  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2760  return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
2761}
2762
2763
2764bool DescriptorArray::HasEnumIndicesCache() {
2765  if (IsEmpty()) return false;
2766  Object* object = get(kEnumCacheIndex);
2767  if (object->IsSmi()) return false;
2768  FixedArray* bridge = FixedArray::cast(object);
2769  return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2770}
2771
2772
2773FixedArray* DescriptorArray::GetEnumIndicesCache() {
2774  DCHECK(HasEnumIndicesCache());
2775  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2776  return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
2777}
2778
2779
2780Object** DescriptorArray::GetEnumCacheSlot() {
2781  DCHECK(HasEnumCache());
2782  return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2783                              kEnumCacheOffset);
2784}
2785
2786// Perform a binary search in a fixed array.
2787template <SearchMode search_mode, typename T>
2788int BinarySearch(T* array, Name* name, int valid_entries,
2789                 int* out_insertion_index) {
2790  DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2791  int low = 0;
2792  int high = array->number_of_entries() - 1;
2793  uint32_t hash = name->hash_field();
2794  int limit = high;
2795
2796  DCHECK(low <= high);
2797
2798  while (low != high) {
2799    int mid = low + (high - low) / 2;
2800    Name* mid_name = array->GetSortedKey(mid);
2801    uint32_t mid_hash = mid_name->hash_field();
2802
2803    if (mid_hash >= hash) {
2804      high = mid;
2805    } else {
2806      low = mid + 1;
2807    }
2808  }
2809
2810  for (; low <= limit; ++low) {
2811    int sort_index = array->GetSortedKeyIndex(low);
2812    Name* entry = array->GetKey(sort_index);
2813    uint32_t current_hash = entry->hash_field();
2814    if (current_hash != hash) {
2815      if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2816        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2817      }
2818      return T::kNotFound;
2819    }
2820    if (entry == name) {
2821      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2822        return sort_index;
2823      }
2824      return T::kNotFound;
2825    }
2826  }
2827
2828  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2829    *out_insertion_index = limit + 1;
2830  }
2831  return T::kNotFound;
2832}
2833
2834
2835// Perform a linear search in this fixed array. len is the number of entry
2836// indices that are valid.
2837template <SearchMode search_mode, typename T>
2838int LinearSearch(T* array, Name* name, int valid_entries,
2839                 int* out_insertion_index) {
2840  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2841    uint32_t hash = name->hash_field();
2842    int len = array->number_of_entries();
2843    for (int number = 0; number < len; number++) {
2844      int sorted_index = array->GetSortedKeyIndex(number);
2845      Name* entry = array->GetKey(sorted_index);
2846      uint32_t current_hash = entry->hash_field();
2847      if (current_hash > hash) {
2848        *out_insertion_index = sorted_index;
2849        return T::kNotFound;
2850      }
2851      if (entry == name) return sorted_index;
2852    }
2853    *out_insertion_index = len;
2854    return T::kNotFound;
2855  } else {
2856    DCHECK_LE(valid_entries, array->number_of_entries());
2857    DCHECK_NULL(out_insertion_index);  // Not supported here.
2858    for (int number = 0; number < valid_entries; number++) {
2859      if (array->GetKey(number) == name) return number;
2860    }
2861    return T::kNotFound;
2862  }
2863}
2864
2865
2866template <SearchMode search_mode, typename T>
2867int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2868  SLOW_DCHECK(array->IsSortedNoDuplicates());
2869
2870  if (valid_entries == 0) {
2871    if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2872      *out_insertion_index = 0;
2873    }
2874    return T::kNotFound;
2875  }
2876
2877  // Fast case: do linear search for small arrays.
2878  const int kMaxElementsForLinearSearch = 8;
2879  if (valid_entries <= kMaxElementsForLinearSearch) {
2880    return LinearSearch<search_mode>(array, name, valid_entries,
2881                                     out_insertion_index);
2882  }
2883
2884  // Slow case: perform binary search.
2885  return BinarySearch<search_mode>(array, name, valid_entries,
2886                                   out_insertion_index);
2887}
2888
2889
2890int DescriptorArray::Search(Name* name, int valid_descriptors) {
2891  DCHECK(name->IsUniqueName());
2892  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2893}
2894
2895int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
2896  DCHECK(name->IsUniqueName());
2897  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2898  if (number_of_own_descriptors == 0) return kNotFound;
2899
2900  DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
2901  int number = cache->Lookup(map, name);
2902
2903  if (number == DescriptorLookupCache::kAbsent) {
2904    number = Search(name, number_of_own_descriptors);
2905    cache->Update(map, name, number);
2906  }
2907
2908  return number;
2909}
2910
2911PropertyDetails Map::GetLastDescriptorDetails() {
2912  return instance_descriptors()->GetDetails(LastAdded());
2913}
2914
2915
2916int Map::LastAdded() {
2917  int number_of_own_descriptors = NumberOfOwnDescriptors();
2918  DCHECK(number_of_own_descriptors > 0);
2919  return number_of_own_descriptors - 1;
2920}
2921
2922
2923int Map::NumberOfOwnDescriptors() {
2924  return NumberOfOwnDescriptorsBits::decode(bit_field3());
2925}
2926
2927
2928void Map::SetNumberOfOwnDescriptors(int number) {
2929  DCHECK(number <= instance_descriptors()->number_of_descriptors());
2930  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
2931}
2932
2933
2934int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
2935
2936
2937void Map::SetEnumLength(int length) {
2938  if (length != kInvalidEnumCacheSentinel) {
2939    DCHECK(length >= 0);
2940    DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
2941    DCHECK(length <= NumberOfOwnDescriptors());
2942  }
2943  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
2944}
2945
2946
2947FixedArrayBase* Map::GetInitialElements() {
2948  FixedArrayBase* result = nullptr;
2949  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
2950    result = GetHeap()->empty_fixed_array();
2951  } else if (has_fast_sloppy_arguments_elements()) {
2952    result = GetHeap()->empty_sloppy_arguments_elements();
2953  } else if (has_fixed_typed_array_elements()) {
2954    result = GetHeap()->EmptyFixedTypedArrayForMap(this);
2955  } else {
2956    UNREACHABLE();
2957  }
2958  DCHECK(!GetHeap()->InNewSpace(result));
2959  return result;
2960}
2961
2962Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2963  DCHECK(descriptor_number < number_of_descriptors());
2964  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2965}
2966
2967
2968Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2969  return GetKeySlot(descriptor_number);
2970}
2971
2972
2973Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2974  return GetValueSlot(descriptor_number - 1) + 1;
2975}
2976
2977
2978Name* DescriptorArray::GetKey(int descriptor_number) {
2979  DCHECK(descriptor_number < number_of_descriptors());
2980  return Name::cast(get(ToKeyIndex(descriptor_number)));
2981}
2982
2983
2984int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2985  return GetDetails(descriptor_number).pointer();
2986}
2987
2988
2989Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2990  return GetKey(GetSortedKeyIndex(descriptor_number));
2991}
2992
2993
2994void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2995  PropertyDetails details = GetDetails(descriptor_index);
2996  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2997}
2998
2999
3000Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3001  DCHECK(descriptor_number < number_of_descriptors());
3002  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3003}
3004
3005
3006int DescriptorArray::GetValueOffset(int descriptor_number) {
3007  return OffsetOfElementAt(ToValueIndex(descriptor_number));
3008}
3009
3010
3011Object* DescriptorArray::GetValue(int descriptor_number) {
3012  DCHECK(descriptor_number < number_of_descriptors());
3013  return get(ToValueIndex(descriptor_number));
3014}
3015
3016
3017void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3018  set(ToValueIndex(descriptor_index), value);
3019}
3020
3021
3022PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3023  DCHECK(descriptor_number < number_of_descriptors());
3024  Object* details = get(ToDetailsIndex(descriptor_number));
3025  return PropertyDetails(Smi::cast(details));
3026}
3027
3028int DescriptorArray::GetFieldIndex(int descriptor_number) {
3029  DCHECK(GetDetails(descriptor_number).location() == kField);
3030  return GetDetails(descriptor_number).field_index();
3031}
3032
3033FieldType* DescriptorArray::GetFieldType(int descriptor_number) {
3034  DCHECK(GetDetails(descriptor_number).location() == kField);
3035  Object* wrapped_type = GetValue(descriptor_number);
3036  return Map::UnwrapFieldType(wrapped_type);
3037}
3038
3039void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3040  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3041             handle(GetValue(descriptor_number), GetIsolate()),
3042             GetDetails(descriptor_number));
3043}
3044
3045void DescriptorArray::Set(int descriptor_number, Name* key, Object* value,
3046                          PropertyDetails details) {
3047  // Range check.
3048  DCHECK(descriptor_number < number_of_descriptors());
3049  set(ToKeyIndex(descriptor_number), key);
3050  set(ToValueIndex(descriptor_number), value);
3051  set(ToDetailsIndex(descriptor_number), details.AsSmi());
3052}
3053
3054void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3055  Name* key = *desc->GetKey();
3056  Object* value = *desc->GetValue();
3057  Set(descriptor_number, key, value, desc->GetDetails());
3058}
3059
3060
3061void DescriptorArray::Append(Descriptor* desc) {
3062  DisallowHeapAllocation no_gc;
3063  int descriptor_number = number_of_descriptors();
3064  SetNumberOfDescriptors(descriptor_number + 1);
3065  Set(descriptor_number, desc);
3066
3067  uint32_t hash = desc->GetKey()->Hash();
3068
3069  int insertion;
3070
3071  for (insertion = descriptor_number; insertion > 0; --insertion) {
3072    Name* key = GetSortedKey(insertion - 1);
3073    if (key->Hash() <= hash) break;
3074    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3075  }
3076
3077  SetSortedKey(insertion, descriptor_number);
3078}
3079
3080
3081void DescriptorArray::SwapSortedKeys(int first, int second) {
3082  int first_key = GetSortedKeyIndex(first);
3083  SetSortedKey(first, GetSortedKeyIndex(second));
3084  SetSortedKey(second, first_key);
3085}
3086
3087
3088int HashTableBase::NumberOfElements() {
3089  return Smi::cast(get(kNumberOfElementsIndex))->value();
3090}
3091
3092
3093int HashTableBase::NumberOfDeletedElements() {
3094  return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
3095}
3096
3097
3098int HashTableBase::Capacity() {
3099  return Smi::cast(get(kCapacityIndex))->value();
3100}
3101
3102
3103void HashTableBase::ElementAdded() {
3104  SetNumberOfElements(NumberOfElements() + 1);
3105}
3106
3107
3108void HashTableBase::ElementRemoved() {
3109  SetNumberOfElements(NumberOfElements() - 1);
3110  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
3111}
3112
3113
3114void HashTableBase::ElementsRemoved(int n) {
3115  SetNumberOfElements(NumberOfElements() - n);
3116  SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
3117}
3118
3119
3120// static
3121int HashTableBase::ComputeCapacity(int at_least_space_for) {
3122  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3123  return Max(capacity, kMinCapacity);
3124}
3125
3126bool HashTableBase::IsKey(Isolate* isolate, Object* k) {
3127  Heap* heap = isolate->heap();
3128  return k != heap->the_hole_value() && k != heap->undefined_value();
3129}
3130
3131bool HashTableBase::IsKey(Object* k) {
3132  Isolate* isolate = this->GetIsolate();
3133  return !k->IsTheHole(isolate) && !k->IsUndefined(isolate);
3134}
3135
3136
3137void HashTableBase::SetNumberOfElements(int nof) {
3138  set(kNumberOfElementsIndex, Smi::FromInt(nof));
3139}
3140
3141
3142void HashTableBase::SetNumberOfDeletedElements(int nod) {
3143  set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
3144}
3145
3146template <typename Key>
3147Map* BaseShape<Key>::GetMap(Isolate* isolate) {
3148  return isolate->heap()->hash_table_map();
3149}
3150
3151template <typename Derived, typename Shape, typename Key>
3152int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3153  return FindEntry(GetIsolate(), key);
3154}
3155
3156
3157template<typename Derived, typename Shape, typename Key>
3158int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3159  return FindEntry(isolate, key, HashTable::Hash(key));
3160}
3161
3162// Find entry for key otherwise return kNotFound.
3163template <typename Derived, typename Shape, typename Key>
3164int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
3165                                              int32_t hash) {
3166  uint32_t capacity = Capacity();
3167  uint32_t entry = FirstProbe(hash, capacity);
3168  uint32_t count = 1;
3169  // EnsureCapacity will guarantee the hash table is never full.
3170  Object* undefined = isolate->heap()->undefined_value();
3171  Object* the_hole = isolate->heap()->the_hole_value();
3172  while (true) {
3173    Object* element = KeyAt(entry);
3174    // Empty entry. Uses raw unchecked accessors because it is called by the
3175    // string table during bootstrapping.
3176    if (element == undefined) break;
3177    if (element != the_hole && Shape::IsMatch(key, element)) return entry;
3178    entry = NextProbe(entry, count++, capacity);
3179  }
3180  return kNotFound;
3181}
3182
3183template <typename Derived, typename Shape, typename Key>
3184bool HashTable<Derived, Shape, Key>::Has(Key key) {
3185  return FindEntry(key) != kNotFound;
3186}
3187
3188template <typename Derived, typename Shape, typename Key>
3189bool HashTable<Derived, Shape, Key>::Has(Isolate* isolate, Key key) {
3190  return FindEntry(isolate, key) != kNotFound;
3191}
3192
3193bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key, int32_t hash) {
3194  return FindEntry(isolate, key, hash) != kNotFound;
3195}
3196
3197bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
3198  Object* hash = key->GetHash();
3199  if (!hash->IsSmi()) return false;
3200  return FindEntry(isolate, key, Smi::cast(hash)->value()) != kNotFound;
3201}
3202
3203bool StringSetShape::IsMatch(String* key, Object* value) {
3204  return value->IsString() && key->Equals(String::cast(value));
3205}
3206
3207uint32_t StringSetShape::Hash(String* key) { return key->Hash(); }
3208
3209uint32_t StringSetShape::HashForObject(String* key, Object* object) {
3210  return object->IsString() ? String::cast(object)->Hash() : 0;
3211}
3212
3213bool SeededNumberDictionary::requires_slow_elements() {
3214  Object* max_index_object = get(kMaxNumberKeyIndex);
3215  if (!max_index_object->IsSmi()) return false;
3216  return 0 !=
3217      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3218}
3219
3220
3221uint32_t SeededNumberDictionary::max_number_key() {
3222  DCHECK(!requires_slow_elements());
3223  Object* max_index_object = get(kMaxNumberKeyIndex);
3224  if (!max_index_object->IsSmi()) return 0;
3225  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3226  return value >> kRequiresSlowElementsTagSize;
3227}
3228
3229
3230void SeededNumberDictionary::set_requires_slow_elements() {
3231  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3232}
3233
3234
3235template <class T>
3236PodArray<T>* PodArray<T>::cast(Object* object) {
3237  SLOW_DCHECK(object->IsByteArray());
3238  return reinterpret_cast<PodArray<T>*>(object);
3239}
3240template <class T>
3241const PodArray<T>* PodArray<T>::cast(const Object* object) {
3242  SLOW_DCHECK(object->IsByteArray());
3243  return reinterpret_cast<const PodArray<T>*>(object);
3244}
3245
3246// static
3247template <class T>
3248Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
3249                                     PretenureFlag pretenure) {
3250  return Handle<PodArray<T>>::cast(
3251      isolate->factory()->NewByteArray(length * sizeof(T), pretenure));
3252}
3253
3254// static
3255template <class Traits>
3256STATIC_CONST_MEMBER_DEFINITION const InstanceType
3257    FixedTypedArray<Traits>::kInstanceType;
3258
3259
3260template <class Traits>
3261FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3262  SLOW_DCHECK(object->IsHeapObject() &&
3263              HeapObject::cast(object)->map()->instance_type() ==
3264              Traits::kInstanceType);
3265  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3266}
3267
3268
3269template <class Traits>
3270const FixedTypedArray<Traits>*
3271FixedTypedArray<Traits>::cast(const Object* object) {
3272  SLOW_DCHECK(object->IsHeapObject() &&
3273              HeapObject::cast(object)->map()->instance_type() ==
3274              Traits::kInstanceType);
3275  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3276}
3277
3278
3279#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type)       \
3280  type* DeoptimizationInputData::name() {                \
3281    return type::cast(get(k##name##Index));              \
3282  }                                                      \
3283  void DeoptimizationInputData::Set##name(type* value) { \
3284    set(k##name##Index, value);                          \
3285  }
3286
3287DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
3288DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
3289DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
3290DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
3291DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
3292DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
3293DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
3294DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
3295DEFINE_DEOPT_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
3296
3297#undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3298
3299
3300#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
3301  type* DeoptimizationInputData::name(int i) {                  \
3302    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
3303  }                                                             \
3304  void DeoptimizationInputData::Set##name(int i, type* value) { \
3305    set(IndexForEntry(i) + k##name##Offset, value);             \
3306  }
3307
3308DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
3309DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
3310DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
3311DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
3312
3313#undef DEFINE_DEOPT_ENTRY_ACCESSORS
3314
3315
3316BailoutId DeoptimizationInputData::AstId(int i) {
3317  return BailoutId(AstIdRaw(i)->value());
3318}
3319
3320
3321void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
3322  SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
3323}
3324
3325
3326int DeoptimizationInputData::DeoptCount() {
3327  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
3328}
3329
3330
3331int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
3332
3333
3334BailoutId DeoptimizationOutputData::AstId(int index) {
3335  return BailoutId(Smi::cast(get(index * 2))->value());
3336}
3337
3338
3339void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
3340  set(index * 2, Smi::FromInt(id.ToInt()));
3341}
3342
3343
3344Smi* DeoptimizationOutputData::PcAndState(int index) {
3345  return Smi::cast(get(1 + index * 2));
3346}
3347
3348
3349void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
3350  set(1 + index * 2, offset);
3351}
3352
3353int HandlerTable::GetRangeStart(int index) const {
3354  return Smi::cast(get(index * kRangeEntrySize + kRangeStartIndex))->value();
3355}
3356
3357int HandlerTable::GetRangeEnd(int index) const {
3358  return Smi::cast(get(index * kRangeEntrySize + kRangeEndIndex))->value();
3359}
3360
3361int HandlerTable::GetRangeHandler(int index) const {
3362  return HandlerOffsetField::decode(
3363      Smi::cast(get(index * kRangeEntrySize + kRangeHandlerIndex))->value());
3364}
3365
3366int HandlerTable::GetRangeData(int index) const {
3367  return Smi::cast(get(index * kRangeEntrySize + kRangeDataIndex))->value();
3368}
3369
3370void HandlerTable::SetRangeStart(int index, int value) {
3371  set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
3372}
3373
3374
3375void HandlerTable::SetRangeEnd(int index, int value) {
3376  set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
3377}
3378
3379
3380void HandlerTable::SetRangeHandler(int index, int offset,
3381                                   CatchPrediction prediction) {
3382  int value = HandlerOffsetField::encode(offset) |
3383              HandlerPredictionField::encode(prediction);
3384  set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
3385}
3386
3387void HandlerTable::SetRangeData(int index, int value) {
3388  set(index * kRangeEntrySize + kRangeDataIndex, Smi::FromInt(value));
3389}
3390
3391
3392void HandlerTable::SetReturnOffset(int index, int value) {
3393  set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
3394}
3395
3396void HandlerTable::SetReturnHandler(int index, int offset) {
3397  int value = HandlerOffsetField::encode(offset);
3398  set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
3399}
3400
3401int HandlerTable::NumberOfRangeEntries() const {
3402  return length() / kRangeEntrySize;
3403}
3404
3405template <typename Derived, typename Shape, typename Key>
3406HashTable<Derived, Shape, Key>*
3407HashTable<Derived, Shape, Key>::cast(Object* obj) {
3408  SLOW_DCHECK(obj->IsHashTable());
3409  return reinterpret_cast<HashTable*>(obj);
3410}
3411
3412
3413template <typename Derived, typename Shape, typename Key>
3414const HashTable<Derived, Shape, Key>*
3415HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3416  SLOW_DCHECK(obj->IsHashTable());
3417  return reinterpret_cast<const HashTable*>(obj);
3418}
3419
3420
3421SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3422SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3423
3424SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3425NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3426
3427SMI_ACCESSORS(String, length, kLengthOffset)
3428SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3429
3430
3431int FreeSpace::Size() { return size(); }
3432
3433
3434FreeSpace* FreeSpace::next() {
3435  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3436         (!GetHeap()->deserialization_complete() && map() == NULL));
3437  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3438  return reinterpret_cast<FreeSpace*>(
3439      Memory::Address_at(address() + kNextOffset));
3440}
3441
3442
3443void FreeSpace::set_next(FreeSpace* next) {
3444  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3445         (!GetHeap()->deserialization_complete() && map() == NULL));
3446  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3447  base::NoBarrier_Store(
3448      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
3449      reinterpret_cast<base::AtomicWord>(next));
3450}
3451
3452
3453FreeSpace* FreeSpace::cast(HeapObject* o) {
3454  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
3455  return reinterpret_cast<FreeSpace*>(o);
3456}
3457
3458
3459uint32_t Name::hash_field() {
3460  return READ_UINT32_FIELD(this, kHashFieldOffset);
3461}
3462
3463
3464void Name::set_hash_field(uint32_t value) {
3465  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3466#if V8_HOST_ARCH_64_BIT
3467#if V8_TARGET_LITTLE_ENDIAN
3468  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
3469#else
3470  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
3471#endif
3472#endif
3473}
3474
3475
3476bool Name::Equals(Name* other) {
3477  if (other == this) return true;
3478  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3479      this->IsSymbol() || other->IsSymbol()) {
3480    return false;
3481  }
3482  return String::cast(this)->SlowEquals(String::cast(other));
3483}
3484
3485
3486bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3487  if (one.is_identical_to(two)) return true;
3488  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3489      one->IsSymbol() || two->IsSymbol()) {
3490    return false;
3491  }
3492  return String::SlowEquals(Handle<String>::cast(one),
3493                            Handle<String>::cast(two));
3494}
3495
3496
3497ACCESSORS(Symbol, name, Object, kNameOffset)
3498SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
3499BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3500BOOL_ACCESSORS(Symbol, flags, is_well_known_symbol, kWellKnownSymbolBit)
3501BOOL_ACCESSORS(Symbol, flags, is_public, kPublicBit)
3502
3503bool String::Equals(String* other) {
3504  if (other == this) return true;
3505  if (this->IsInternalizedString() && other->IsInternalizedString()) {
3506    return false;
3507  }
3508  return SlowEquals(other);
3509}
3510
3511
3512bool String::Equals(Handle<String> one, Handle<String> two) {
3513  if (one.is_identical_to(two)) return true;
3514  if (one->IsInternalizedString() && two->IsInternalizedString()) {
3515    return false;
3516  }
3517  return SlowEquals(one, two);
3518}
3519
3520
3521Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3522  if (string->IsConsString()) {
3523    Handle<ConsString> cons = Handle<ConsString>::cast(string);
3524    if (cons->IsFlat()) {
3525      string = handle(cons->first());
3526    } else {
3527      return SlowFlatten(cons, pretenure);
3528    }
3529  }
3530  if (string->IsThinString()) {
3531    string = handle(Handle<ThinString>::cast(string)->actual());
3532    DCHECK(!string->IsConsString());
3533  }
3534  return string;
3535}
3536
3537
3538uint16_t String::Get(int index) {
3539  DCHECK(index >= 0 && index < length());
3540  switch (StringShape(this).full_representation_tag()) {
3541    case kSeqStringTag | kOneByteStringTag:
3542      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3543    case kSeqStringTag | kTwoByteStringTag:
3544      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3545    case kConsStringTag | kOneByteStringTag:
3546    case kConsStringTag | kTwoByteStringTag:
3547      return ConsString::cast(this)->ConsStringGet(index);
3548    case kExternalStringTag | kOneByteStringTag:
3549      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3550    case kExternalStringTag | kTwoByteStringTag:
3551      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3552    case kSlicedStringTag | kOneByteStringTag:
3553    case kSlicedStringTag | kTwoByteStringTag:
3554      return SlicedString::cast(this)->SlicedStringGet(index);
3555    case kThinStringTag | kOneByteStringTag:
3556    case kThinStringTag | kTwoByteStringTag:
3557      return ThinString::cast(this)->ThinStringGet(index);
3558    default:
3559      break;
3560  }
3561
3562  UNREACHABLE();
3563  return 0;
3564}
3565
3566
3567void String::Set(int index, uint16_t value) {
3568  DCHECK(index >= 0 && index < length());
3569  DCHECK(StringShape(this).IsSequential());
3570
3571  return this->IsOneByteRepresentation()
3572      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3573      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3574}
3575
3576
3577bool String::IsFlat() {
3578  if (!StringShape(this).IsCons()) return true;
3579  return ConsString::cast(this)->second()->length() == 0;
3580}
3581
3582
3583String* String::GetUnderlying() {
3584  // Giving direct access to underlying string only makes sense if the
3585  // wrapping string is already flattened.
3586  DCHECK(this->IsFlat());
3587  DCHECK(StringShape(this).IsIndirect());
3588  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3589  STATIC_ASSERT(ConsString::kFirstOffset == ThinString::kActualOffset);
3590  const int kUnderlyingOffset = SlicedString::kParentOffset;
3591  return String::cast(READ_FIELD(this, kUnderlyingOffset));
3592}
3593
3594
3595template<class Visitor>
3596ConsString* String::VisitFlat(Visitor* visitor,
3597                              String* string,
3598                              const int offset) {
3599  int slice_offset = offset;
3600  const int length = string->length();
3601  DCHECK(offset <= length);
3602  while (true) {
3603    int32_t type = string->map()->instance_type();
3604    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3605      case kSeqStringTag | kOneByteStringTag:
3606        visitor->VisitOneByteString(
3607            SeqOneByteString::cast(string)->GetChars() + slice_offset,
3608            length - offset);
3609        return NULL;
3610
3611      case kSeqStringTag | kTwoByteStringTag:
3612        visitor->VisitTwoByteString(
3613            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3614            length - offset);
3615        return NULL;
3616
3617      case kExternalStringTag | kOneByteStringTag:
3618        visitor->VisitOneByteString(
3619            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3620            length - offset);
3621        return NULL;
3622
3623      case kExternalStringTag | kTwoByteStringTag:
3624        visitor->VisitTwoByteString(
3625            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3626            length - offset);
3627        return NULL;
3628
3629      case kSlicedStringTag | kOneByteStringTag:
3630      case kSlicedStringTag | kTwoByteStringTag: {
3631        SlicedString* slicedString = SlicedString::cast(string);
3632        slice_offset += slicedString->offset();
3633        string = slicedString->parent();
3634        continue;
3635      }
3636
3637      case kConsStringTag | kOneByteStringTag:
3638      case kConsStringTag | kTwoByteStringTag:
3639        return ConsString::cast(string);
3640
3641      case kThinStringTag | kOneByteStringTag:
3642      case kThinStringTag | kTwoByteStringTag:
3643        string = ThinString::cast(string)->actual();
3644        continue;
3645
3646      default:
3647        UNREACHABLE();
3648        return NULL;
3649    }
3650  }
3651}
3652
3653
3654template <>
3655inline Vector<const uint8_t> String::GetCharVector() {
3656  String::FlatContent flat = GetFlatContent();
3657  DCHECK(flat.IsOneByte());
3658  return flat.ToOneByteVector();
3659}
3660
3661
3662template <>
3663inline Vector<const uc16> String::GetCharVector() {
3664  String::FlatContent flat = GetFlatContent();
3665  DCHECK(flat.IsTwoByte());
3666  return flat.ToUC16Vector();
3667}
3668
3669uint32_t String::ToValidIndex(Object* number) {
3670  uint32_t index = PositiveNumberToUint32(number);
3671  uint32_t length_value = static_cast<uint32_t>(length());
3672  if (index > length_value) return length_value;
3673  return index;
3674}
3675
3676uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3677  DCHECK(index >= 0 && index < length());
3678  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3679}
3680
3681
3682void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3683  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3684  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3685                   static_cast<byte>(value));
3686}
3687
3688
3689Address SeqOneByteString::GetCharsAddress() {
3690  return FIELD_ADDR(this, kHeaderSize);
3691}
3692
3693
3694uint8_t* SeqOneByteString::GetChars() {
3695  return reinterpret_cast<uint8_t*>(GetCharsAddress());
3696}
3697
3698
3699Address SeqTwoByteString::GetCharsAddress() {
3700  return FIELD_ADDR(this, kHeaderSize);
3701}
3702
3703
3704uc16* SeqTwoByteString::GetChars() {
3705  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3706}
3707
3708
3709uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3710  DCHECK(index >= 0 && index < length());
3711  return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
3712}
3713
3714
3715void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3716  DCHECK(index >= 0 && index < length());
3717  WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
3718}
3719
3720
3721int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3722  return SizeFor(length());
3723}
3724
3725
3726int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3727  return SizeFor(length());
3728}
3729
3730
3731String* SlicedString::parent() {
3732  return String::cast(READ_FIELD(this, kParentOffset));
3733}
3734
3735
3736void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3737  DCHECK(parent->IsSeqString() || parent->IsExternalString());
3738  WRITE_FIELD(this, kParentOffset, parent);
3739  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3740}
3741
3742
3743SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3744
3745
3746String* ConsString::first() {
3747  return String::cast(READ_FIELD(this, kFirstOffset));
3748}
3749
3750
3751Object* ConsString::unchecked_first() {
3752  return READ_FIELD(this, kFirstOffset);
3753}
3754
3755
3756void ConsString::set_first(String* value, WriteBarrierMode mode) {
3757  WRITE_FIELD(this, kFirstOffset, value);
3758  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3759}
3760
3761
3762String* ConsString::second() {
3763  return String::cast(READ_FIELD(this, kSecondOffset));
3764}
3765
3766
3767Object* ConsString::unchecked_second() {
3768  return READ_FIELD(this, kSecondOffset);
3769}
3770
3771
3772void ConsString::set_second(String* value, WriteBarrierMode mode) {
3773  WRITE_FIELD(this, kSecondOffset, value);
3774  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3775}
3776
3777ACCESSORS(ThinString, actual, String, kActualOffset);
3778
3779bool ExternalString::is_short() {
3780  InstanceType type = map()->instance_type();
3781  return (type & kShortExternalStringMask) == kShortExternalStringTag;
3782}
3783
3784
3785const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3786  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3787}
3788
3789
3790void ExternalOneByteString::update_data_cache() {
3791  if (is_short()) return;
3792  const char** data_field =
3793      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3794  *data_field = resource()->data();
3795}
3796
3797
3798void ExternalOneByteString::set_resource(
3799    const ExternalOneByteString::Resource* resource) {
3800  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3801  *reinterpret_cast<const Resource**>(
3802      FIELD_ADDR(this, kResourceOffset)) = resource;
3803  if (resource != NULL) update_data_cache();
3804}
3805
3806
3807const uint8_t* ExternalOneByteString::GetChars() {
3808  return reinterpret_cast<const uint8_t*>(resource()->data());
3809}
3810
3811
3812uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3813  DCHECK(index >= 0 && index < length());
3814  return GetChars()[index];
3815}
3816
3817
3818const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3819  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3820}
3821
3822
3823void ExternalTwoByteString::update_data_cache() {
3824  if (is_short()) return;
3825  const uint16_t** data_field =
3826      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3827  *data_field = resource()->data();
3828}
3829
3830
3831void ExternalTwoByteString::set_resource(
3832    const ExternalTwoByteString::Resource* resource) {
3833  *reinterpret_cast<const Resource**>(
3834      FIELD_ADDR(this, kResourceOffset)) = resource;
3835  if (resource != NULL) update_data_cache();
3836}
3837
3838
3839const uint16_t* ExternalTwoByteString::GetChars() {
3840  return resource()->data();
3841}
3842
3843
3844uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3845  DCHECK(index >= 0 && index < length());
3846  return GetChars()[index];
3847}
3848
3849
3850const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3851      unsigned start) {
3852  return GetChars() + start;
3853}
3854
3855
3856int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3857
3858
3859void ConsStringIterator::PushLeft(ConsString* string) {
3860  frames_[depth_++ & kDepthMask] = string;
3861}
3862
3863
3864void ConsStringIterator::PushRight(ConsString* string) {
3865  // Inplace update.
3866  frames_[(depth_-1) & kDepthMask] = string;
3867}
3868
3869
3870void ConsStringIterator::AdjustMaximumDepth() {
3871  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3872}
3873
3874
3875void ConsStringIterator::Pop() {
3876  DCHECK(depth_ > 0);
3877  DCHECK(depth_ <= maximum_depth_);
3878  depth_--;
3879}
3880
3881
3882uint16_t StringCharacterStream::GetNext() {
3883  DCHECK(buffer8_ != NULL && end_ != NULL);
3884  // Advance cursor if needed.
3885  if (buffer8_ == end_) HasMore();
3886  DCHECK(buffer8_ < end_);
3887  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3888}
3889
3890
3891StringCharacterStream::StringCharacterStream(String* string, int offset)
3892    : is_one_byte_(false) {
3893  Reset(string, offset);
3894}
3895
3896
3897void StringCharacterStream::Reset(String* string, int offset) {
3898  buffer8_ = NULL;
3899  end_ = NULL;
3900  ConsString* cons_string = String::VisitFlat(this, string, offset);
3901  iter_.Reset(cons_string, offset);
3902  if (cons_string != NULL) {
3903    string = iter_.Next(&offset);
3904    if (string != NULL) String::VisitFlat(this, string, offset);
3905  }
3906}
3907
3908
3909bool StringCharacterStream::HasMore() {
3910  if (buffer8_ != end_) return true;
3911  int offset;
3912  String* string = iter_.Next(&offset);
3913  DCHECK_EQ(offset, 0);
3914  if (string == NULL) return false;
3915  String::VisitFlat(this, string);
3916  DCHECK(buffer8_ != end_);
3917  return true;
3918}
3919
3920
3921void StringCharacterStream::VisitOneByteString(
3922    const uint8_t* chars, int length) {
3923  is_one_byte_ = true;
3924  buffer8_ = chars;
3925  end_ = chars + length;
3926}
3927
3928
3929void StringCharacterStream::VisitTwoByteString(
3930    const uint16_t* chars, int length) {
3931  is_one_byte_ = false;
3932  buffer16_ = chars;
3933  end_ = reinterpret_cast<const uint8_t*>(chars + length);
3934}
3935
3936
3937int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
3938
3939byte ByteArray::get(int index) {
3940  DCHECK(index >= 0 && index < this->length());
3941  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3942}
3943
3944void ByteArray::set(int index, byte value) {
3945  DCHECK(index >= 0 && index < this->length());
3946  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3947}
3948
3949void ByteArray::copy_in(int index, const byte* buffer, int length) {
3950  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
3951         index + length <= this->length());
3952  byte* dst_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
3953  memcpy(dst_addr, buffer, length);
3954}
3955
3956void ByteArray::copy_out(int index, byte* buffer, int length) {
3957  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
3958         index + length <= this->length());
3959  const byte* src_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
3960  memcpy(buffer, src_addr, length);
3961}
3962
3963int ByteArray::get_int(int index) {
3964  DCHECK(index >= 0 && index < this->length() / kIntSize);
3965  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3966}
3967
3968void ByteArray::set_int(int index, int value) {
3969  DCHECK(index >= 0 && index < this->length() / kIntSize);
3970  WRITE_INT_FIELD(this, kHeaderSize + index * kIntSize, value);
3971}
3972
3973ByteArray* ByteArray::FromDataStartAddress(Address address) {
3974  DCHECK_TAG_ALIGNED(address);
3975  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3976}
3977
3978
3979int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
3980
3981
3982Address ByteArray::GetDataStartAddress() {
3983  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3984}
3985
3986
3987byte BytecodeArray::get(int index) {
3988  DCHECK(index >= 0 && index < this->length());
3989  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3990}
3991
3992
3993void BytecodeArray::set(int index, byte value) {
3994  DCHECK(index >= 0 && index < this->length());
3995  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3996}
3997
3998
3999void BytecodeArray::set_frame_size(int frame_size) {
4000  DCHECK_GE(frame_size, 0);
4001  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
4002  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
4003}
4004
4005
4006int BytecodeArray::frame_size() const {
4007  return READ_INT_FIELD(this, kFrameSizeOffset);
4008}
4009
4010
4011int BytecodeArray::register_count() const {
4012  return frame_size() / kPointerSize;
4013}
4014
4015
4016void BytecodeArray::set_parameter_count(int number_of_parameters) {
4017  DCHECK_GE(number_of_parameters, 0);
4018  // Parameter count is stored as the size on stack of the parameters to allow
4019  // it to be used directly by generated code.
4020  WRITE_INT_FIELD(this, kParameterSizeOffset,
4021                  (number_of_parameters << kPointerSizeLog2));
4022}
4023
4024int BytecodeArray::interrupt_budget() const {
4025  return READ_INT_FIELD(this, kInterruptBudgetOffset);
4026}
4027
4028void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
4029  DCHECK_GE(interrupt_budget, 0);
4030  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
4031}
4032
4033int BytecodeArray::osr_loop_nesting_level() const {
4034  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
4035}
4036
4037void BytecodeArray::set_osr_loop_nesting_level(int depth) {
4038  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
4039  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
4040  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
4041}
4042
4043BytecodeArray::Age BytecodeArray::bytecode_age() const {
4044  return static_cast<Age>(READ_INT8_FIELD(this, kBytecodeAgeOffset));
4045}
4046
4047void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
4048  DCHECK_GE(age, kFirstBytecodeAge);
4049  DCHECK_LE(age, kLastBytecodeAge);
4050  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
4051  WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
4052}
4053
4054int BytecodeArray::parameter_count() const {
4055  // Parameter count is stored as the size on stack of the parameters to allow
4056  // it to be used directly by generated code.
4057  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
4058}
4059
4060ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
4061ACCESSORS(BytecodeArray, handler_table, FixedArray, kHandlerTableOffset)
4062ACCESSORS(BytecodeArray, source_position_table, ByteArray,
4063          kSourcePositionTableOffset)
4064
4065Address BytecodeArray::GetFirstBytecodeAddress() {
4066  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4067}
4068
4069
4070int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4071
4072int BytecodeArray::SizeIncludingMetadata() {
4073  int size = BytecodeArraySize();
4074  size += constant_pool()->Size();
4075  size += handler_table()->Size();
4076  size += source_position_table()->Size();
4077  return size;
4078}
4079
4080ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
4081
4082
4083void* FixedTypedArrayBase::external_pointer() const {
4084  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
4085  return reinterpret_cast<void*>(ptr);
4086}
4087
4088
4089void FixedTypedArrayBase::set_external_pointer(void* value,
4090                                               WriteBarrierMode mode) {
4091  intptr_t ptr = reinterpret_cast<intptr_t>(value);
4092  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
4093}
4094
4095
4096void* FixedTypedArrayBase::DataPtr() {
4097  return reinterpret_cast<void*>(
4098      reinterpret_cast<intptr_t>(base_pointer()) +
4099      reinterpret_cast<intptr_t>(external_pointer()));
4100}
4101
4102
4103int FixedTypedArrayBase::ElementSize(InstanceType type) {
4104  int element_size;
4105  switch (type) {
4106#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
4107    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
4108      element_size = size;                                                    \
4109      break;
4110
4111    TYPED_ARRAYS(TYPED_ARRAY_CASE)
4112#undef TYPED_ARRAY_CASE
4113    default:
4114      UNREACHABLE();
4115      return 0;
4116  }
4117  return element_size;
4118}
4119
4120
4121int FixedTypedArrayBase::DataSize(InstanceType type) {
4122  if (base_pointer() == Smi::kZero) return 0;
4123  return length() * ElementSize(type);
4124}
4125
4126
4127int FixedTypedArrayBase::DataSize() {
4128  return DataSize(map()->instance_type());
4129}
4130
4131
4132int FixedTypedArrayBase::size() {
4133  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4134}
4135
4136
4137int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4138  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4139}
4140
4141
4142int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
4143  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
4144}
4145
4146
4147uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4148
4149
4150uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4151
4152
4153int8_t Int8ArrayTraits::defaultValue() { return 0; }
4154
4155
4156uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4157
4158
4159int16_t Int16ArrayTraits::defaultValue() { return 0; }
4160
4161
4162uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4163
4164
4165int32_t Int32ArrayTraits::defaultValue() { return 0; }
4166
4167
4168float Float32ArrayTraits::defaultValue() {
4169  return std::numeric_limits<float>::quiet_NaN();
4170}
4171
4172
4173double Float64ArrayTraits::defaultValue() {
4174  return std::numeric_limits<double>::quiet_NaN();
4175}
4176
4177
4178template <class Traits>
4179typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4180  DCHECK((index >= 0) && (index < this->length()));
4181  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4182  return ptr[index];
4183}
4184
4185
4186template <class Traits>
4187void FixedTypedArray<Traits>::set(int index, ElementType value) {
4188  DCHECK((index >= 0) && (index < this->length()));
4189  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4190  ptr[index] = value;
4191}
4192
4193
4194template <class Traits>
4195typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4196  return static_cast<ElementType>(value);
4197}
4198
4199
4200template <> inline
4201uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4202  if (value < 0) return 0;
4203  if (value > 0xFF) return 0xFF;
4204  return static_cast<uint8_t>(value);
4205}
4206
4207
4208template <class Traits>
4209typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4210    double value) {
4211  return static_cast<ElementType>(DoubleToInt32(value));
4212}
4213
4214
4215template<> inline
4216uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4217  // Handle NaNs and less than zero values which clamp to zero.
4218  if (!(value > 0)) return 0;
4219  if (value > 0xFF) return 0xFF;
4220  return static_cast<uint8_t>(lrint(value));
4221}
4222
4223
4224template<> inline
4225float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4226  return static_cast<float>(value);
4227}
4228
4229
4230template<> inline
4231double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4232  return value;
4233}
4234
4235template <class Traits>
4236Handle<Object> FixedTypedArray<Traits>::get(FixedTypedArray<Traits>* array,
4237                                            int index) {
4238  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4239}
4240
4241
4242template <class Traits>
4243void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
4244  ElementType cast_value = Traits::defaultValue();
4245  if (value->IsSmi()) {
4246    int int_value = Smi::cast(value)->value();
4247    cast_value = from_int(int_value);
4248  } else if (value->IsHeapNumber()) {
4249    double double_value = HeapNumber::cast(value)->value();
4250    cast_value = from_double(double_value);
4251  } else {
4252    // Clamp undefined to the default value. All other types have been
4253    // converted to a number type further up in the call chain.
4254    DCHECK(value->IsUndefined(GetIsolate()));
4255  }
4256  set(index, cast_value);
4257}
4258
4259
4260Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4261  return handle(Smi::FromInt(scalar), isolate);
4262}
4263
4264
4265Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4266                                                 uint8_t scalar) {
4267  return handle(Smi::FromInt(scalar), isolate);
4268}
4269
4270
4271Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4272  return handle(Smi::FromInt(scalar), isolate);
4273}
4274
4275
4276Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4277  return handle(Smi::FromInt(scalar), isolate);
4278}
4279
4280
4281Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4282  return handle(Smi::FromInt(scalar), isolate);
4283}
4284
4285
4286Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4287  return isolate->factory()->NewNumberFromUint(scalar);
4288}
4289
4290
4291Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4292  return isolate->factory()->NewNumberFromInt(scalar);
4293}
4294
4295
4296Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4297  return isolate->factory()->NewNumber(scalar);
4298}
4299
4300
4301Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4302  return isolate->factory()->NewNumber(scalar);
4303}
4304
4305
4306int Map::visitor_id() {
4307  return READ_BYTE_FIELD(this, kVisitorIdOffset);
4308}
4309
4310
4311void Map::set_visitor_id(int id) {
4312  DCHECK(0 <= id && id < 256);
4313  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4314}
4315
4316
4317int Map::instance_size() {
4318  return NOBARRIER_READ_BYTE_FIELD(
4319      this, kInstanceSizeOffset) << kPointerSizeLog2;
4320}
4321
4322
4323int Map::inobject_properties_or_constructor_function_index() {
4324  return READ_BYTE_FIELD(this,
4325                         kInObjectPropertiesOrConstructorFunctionIndexOffset);
4326}
4327
4328
4329void Map::set_inobject_properties_or_constructor_function_index(int value) {
4330  DCHECK(0 <= value && value < 256);
4331  WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
4332                   static_cast<byte>(value));
4333}
4334
4335
4336int Map::GetInObjectProperties() {
4337  DCHECK(IsJSObjectMap());
4338  return inobject_properties_or_constructor_function_index();
4339}
4340
4341
4342void Map::SetInObjectProperties(int value) {
4343  DCHECK(IsJSObjectMap());
4344  set_inobject_properties_or_constructor_function_index(value);
4345}
4346
4347
4348int Map::GetConstructorFunctionIndex() {
4349  DCHECK(IsPrimitiveMap());
4350  return inobject_properties_or_constructor_function_index();
4351}
4352
4353
4354void Map::SetConstructorFunctionIndex(int value) {
4355  DCHECK(IsPrimitiveMap());
4356  set_inobject_properties_or_constructor_function_index(value);
4357}
4358
4359
4360int Map::GetInObjectPropertyOffset(int index) {
4361  // Adjust for the number of properties stored in the object.
4362  index -= GetInObjectProperties();
4363  DCHECK(index <= 0);
4364  return instance_size() + (index * kPointerSize);
4365}
4366
4367
4368Handle<Map> Map::AddMissingTransitionsForTesting(
4369    Handle<Map> split_map, Handle<DescriptorArray> descriptors,
4370    Handle<LayoutDescriptor> full_layout_descriptor) {
4371  return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
4372}
4373
4374
4375int HeapObject::SizeFromMap(Map* map) {
4376  int instance_size = map->instance_size();
4377  if (instance_size != kVariableSizeSentinel) return instance_size;
4378  // Only inline the most frequent cases.
4379  InstanceType instance_type = map->instance_type();
4380  if (instance_type == FIXED_ARRAY_TYPE ||
4381      instance_type == TRANSITION_ARRAY_TYPE) {
4382    return FixedArray::SizeFor(
4383        reinterpret_cast<FixedArray*>(this)->synchronized_length());
4384  }
4385  if (instance_type == ONE_BYTE_STRING_TYPE ||
4386      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4387    // Strings may get concurrently truncated, hence we have to access its
4388    // length synchronized.
4389    return SeqOneByteString::SizeFor(
4390        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
4391  }
4392  if (instance_type == BYTE_ARRAY_TYPE) {
4393    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4394  }
4395  if (instance_type == BYTECODE_ARRAY_TYPE) {
4396    return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
4397  }
4398  if (instance_type == FREE_SPACE_TYPE) {
4399    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4400  }
4401  if (instance_type == STRING_TYPE ||
4402      instance_type == INTERNALIZED_STRING_TYPE) {
4403    // Strings may get concurrently truncated, hence we have to access its
4404    // length synchronized.
4405    return SeqTwoByteString::SizeFor(
4406        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
4407  }
4408  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4409    return FixedDoubleArray::SizeFor(
4410        reinterpret_cast<FixedDoubleArray*>(this)->length());
4411  }
4412  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4413      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4414    return reinterpret_cast<FixedTypedArrayBase*>(
4415        this)->TypedArraySize(instance_type);
4416  }
4417  DCHECK(instance_type == CODE_TYPE);
4418  return reinterpret_cast<Code*>(this)->CodeSize();
4419}
4420
4421
4422void Map::set_instance_size(int value) {
4423  DCHECK_EQ(0, value & (kPointerSize - 1));
4424  value >>= kPointerSizeLog2;
4425  DCHECK(0 <= value && value < 256);
4426  NOBARRIER_WRITE_BYTE_FIELD(
4427      this, kInstanceSizeOffset, static_cast<byte>(value));
4428}
4429
4430
4431void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4432
4433
4434InstanceType Map::instance_type() {
4435  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4436}
4437
4438
4439void Map::set_instance_type(InstanceType value) {
4440  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4441}
4442
4443
4444int Map::unused_property_fields() {
4445  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4446}
4447
4448
4449void Map::set_unused_property_fields(int value) {
4450  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4451}
4452
4453
4454byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
4455
4456
4457void Map::set_bit_field(byte value) {
4458  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4459}
4460
4461
4462byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
4463
4464
4465void Map::set_bit_field2(byte value) {
4466  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4467}
4468
4469
4470void Map::set_non_instance_prototype(bool value) {
4471  if (value) {
4472    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4473  } else {
4474    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4475  }
4476}
4477
4478
4479bool Map::has_non_instance_prototype() {
4480  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4481}
4482
4483
4484void Map::set_is_constructor(bool value) {
4485  if (value) {
4486    set_bit_field(bit_field() | (1 << kIsConstructor));
4487  } else {
4488    set_bit_field(bit_field() & ~(1 << kIsConstructor));
4489  }
4490}
4491
4492
4493bool Map::is_constructor() const {
4494  return ((1 << kIsConstructor) & bit_field()) != 0;
4495}
4496
4497void Map::set_has_hidden_prototype(bool value) {
4498  set_bit_field3(HasHiddenPrototype::update(bit_field3(), value));
4499}
4500
4501bool Map::has_hidden_prototype() const {
4502  return HasHiddenPrototype::decode(bit_field3());
4503}
4504
4505
4506void Map::set_has_indexed_interceptor() {
4507  set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
4508}
4509
4510
4511bool Map::has_indexed_interceptor() {
4512  return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
4513}
4514
4515
4516void Map::set_is_undetectable() {
4517  set_bit_field(bit_field() | (1 << kIsUndetectable));
4518}
4519
4520
4521bool Map::is_undetectable() {
4522  return ((1 << kIsUndetectable) & bit_field()) != 0;
4523}
4524
4525
4526void Map::set_has_named_interceptor() {
4527  set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
4528}
4529
4530
4531bool Map::has_named_interceptor() {
4532  return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
4533}
4534
4535
4536void Map::set_is_access_check_needed(bool access_check_needed) {
4537  if (access_check_needed) {
4538    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4539  } else {
4540    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4541  }
4542}
4543
4544
4545bool Map::is_access_check_needed() {
4546  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4547}
4548
4549
4550void Map::set_is_extensible(bool value) {
4551  if (value) {
4552    set_bit_field2(bit_field2() | (1 << kIsExtensible));
4553  } else {
4554    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4555  }
4556}
4557
4558bool Map::is_extensible() {
4559  return ((1 << kIsExtensible) & bit_field2()) != 0;
4560}
4561
4562
4563void Map::set_is_prototype_map(bool value) {
4564  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4565}
4566
4567bool Map::is_prototype_map() const {
4568  return IsPrototypeMapBits::decode(bit_field2());
4569}
4570
4571bool Map::should_be_fast_prototype_map() const {
4572  if (!prototype_info()->IsPrototypeInfo()) return false;
4573  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
4574}
4575
4576void Map::set_elements_kind(ElementsKind elements_kind) {
4577  DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
4578  DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
4579  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
4580  DCHECK(this->elements_kind() == elements_kind);
4581}
4582
4583
4584ElementsKind Map::elements_kind() {
4585  return Map::ElementsKindBits::decode(bit_field2());
4586}
4587
4588
4589bool Map::has_fast_smi_elements() {
4590  return IsFastSmiElementsKind(elements_kind());
4591}
4592
4593bool Map::has_fast_object_elements() {
4594  return IsFastObjectElementsKind(elements_kind());
4595}
4596
4597bool Map::has_fast_smi_or_object_elements() {
4598  return IsFastSmiOrObjectElementsKind(elements_kind());
4599}
4600
4601bool Map::has_fast_double_elements() {
4602  return IsFastDoubleElementsKind(elements_kind());
4603}
4604
4605bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
4606
4607bool Map::has_sloppy_arguments_elements() {
4608  return IsSloppyArgumentsElements(elements_kind());
4609}
4610
4611bool Map::has_fast_sloppy_arguments_elements() {
4612  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
4613}
4614
4615bool Map::has_fast_string_wrapper_elements() {
4616  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
4617}
4618
4619bool Map::has_fixed_typed_array_elements() {
4620  return IsFixedTypedArrayElementsKind(elements_kind());
4621}
4622
4623bool Map::has_dictionary_elements() {
4624  return IsDictionaryElementsKind(elements_kind());
4625}
4626
4627
4628void Map::set_dictionary_map(bool value) {
4629  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4630  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4631  set_bit_field3(new_bit_field3);
4632}
4633
4634
4635bool Map::is_dictionary_map() {
4636  return DictionaryMap::decode(bit_field3());
4637}
4638
4639
4640Code::Flags Code::flags() {
4641  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4642}
4643
4644
4645void Map::set_owns_descriptors(bool owns_descriptors) {
4646  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4647}
4648
4649
4650bool Map::owns_descriptors() {
4651  return OwnsDescriptors::decode(bit_field3());
4652}
4653
4654
4655void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }
4656
4657
4658bool Map::is_callable() const {
4659  return ((1 << kIsCallable) & bit_field()) != 0;
4660}
4661
4662
4663void Map::deprecate() {
4664  set_bit_field3(Deprecated::update(bit_field3(), true));
4665}
4666
4667
4668bool Map::is_deprecated() {
4669  return Deprecated::decode(bit_field3());
4670}
4671
4672
4673void Map::set_migration_target(bool value) {
4674  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4675}
4676
4677
4678bool Map::is_migration_target() {
4679  return IsMigrationTarget::decode(bit_field3());
4680}
4681
4682void Map::set_immutable_proto(bool value) {
4683  set_bit_field3(ImmutablePrototype::update(bit_field3(), value));
4684}
4685
4686bool Map::is_immutable_proto() {
4687  return ImmutablePrototype::decode(bit_field3());
4688}
4689
4690void Map::set_new_target_is_base(bool value) {
4691  set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
4692}
4693
4694
4695bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }
4696
4697
4698void Map::set_construction_counter(int value) {
4699  set_bit_field3(ConstructionCounter::update(bit_field3(), value));
4700}
4701
4702
4703int Map::construction_counter() {
4704  return ConstructionCounter::decode(bit_field3());
4705}
4706
4707
4708void Map::mark_unstable() {
4709  set_bit_field3(IsUnstable::update(bit_field3(), true));
4710}
4711
4712
4713bool Map::is_stable() {
4714  return !IsUnstable::decode(bit_field3());
4715}
4716
4717
4718bool Map::has_code_cache() {
4719  // Code caches are always fixed arrays. The empty fixed array is used as a
4720  // sentinel for an absent code cache.
4721  return code_cache()->length() != 0;
4722}
4723
4724
4725bool Map::CanBeDeprecated() {
4726  int descriptor = LastAdded();
4727  for (int i = 0; i <= descriptor; i++) {
4728    PropertyDetails details = instance_descriptors()->GetDetails(i);
4729    if (details.representation().IsNone()) return true;
4730    if (details.representation().IsSmi()) return true;
4731    if (details.representation().IsDouble()) return true;
4732    if (details.representation().IsHeapObject()) return true;
4733    if (details.kind() == kData && details.location() == kDescriptor) {
4734      return true;
4735    }
4736  }
4737  return false;
4738}
4739
4740
4741void Map::NotifyLeafMapLayoutChange() {
4742  if (is_stable()) {
4743    mark_unstable();
4744    dependent_code()->DeoptimizeDependentCodeGroup(
4745        GetIsolate(),
4746        DependentCode::kPrototypeCheckGroup);
4747  }
4748}
4749
4750
4751bool Map::CanTransition() {
4752  // Only JSObject and subtypes have map transitions and back pointers.
4753  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
4754  return instance_type() >= FIRST_JS_OBJECT_TYPE;
4755}
4756
4757
4758bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
4759bool Map::IsPrimitiveMap() {
4760  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
4761  return instance_type() <= LAST_PRIMITIVE_TYPE;
4762}
4763bool Map::IsJSReceiverMap() {
4764  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4765  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
4766}
4767bool Map::IsJSObjectMap() {
4768  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
4769  return instance_type() >= FIRST_JS_OBJECT_TYPE;
4770}
4771bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
4772bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
4773bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
4774bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
4775bool Map::IsJSGlobalProxyMap() {
4776  return instance_type() == JS_GLOBAL_PROXY_TYPE;
4777}
4778bool Map::IsJSGlobalObjectMap() {
4779  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
4780}
4781bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
4782bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }
4783
4784bool Map::IsSpecialReceiverMap() {
4785  bool result = IsSpecialReceiverInstanceType(instance_type());
4786  DCHECK_IMPLIES(!result,
4787                 !has_named_interceptor() && !is_access_check_needed());
4788  return result;
4789}
4790
4791bool Map::CanOmitMapChecks() {
4792  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4793}
4794
4795
4796DependentCode* DependentCode::next_link() {
4797  return DependentCode::cast(get(kNextLinkIndex));
4798}
4799
4800
4801void DependentCode::set_next_link(DependentCode* next) {
4802  set(kNextLinkIndex, next);
4803}
4804
4805
4806int DependentCode::flags() { return Smi::cast(get(kFlagsIndex))->value(); }
4807
4808
4809void DependentCode::set_flags(int flags) {
4810  set(kFlagsIndex, Smi::FromInt(flags));
4811}
4812
4813
4814int DependentCode::count() { return CountField::decode(flags()); }
4815
4816void DependentCode::set_count(int value) {
4817  set_flags(CountField::update(flags(), value));
4818}
4819
4820
4821DependentCode::DependencyGroup DependentCode::group() {
4822  return static_cast<DependencyGroup>(GroupField::decode(flags()));
4823}
4824
4825
4826void DependentCode::set_group(DependentCode::DependencyGroup group) {
4827  set_flags(GroupField::update(flags(), static_cast<int>(group)));
4828}
4829
4830
4831void DependentCode::set_object_at(int i, Object* object) {
4832  set(kCodesStartIndex + i, object);
4833}
4834
4835
4836Object* DependentCode::object_at(int i) {
4837  return get(kCodesStartIndex + i);
4838}
4839
4840
4841void DependentCode::clear_at(int i) {
4842  set_undefined(kCodesStartIndex + i);
4843}
4844
4845
4846void DependentCode::copy(int from, int to) {
4847  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4848}
4849
4850
4851void Code::set_flags(Code::Flags flags) {
4852  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4853  WRITE_INT_FIELD(this, kFlagsOffset, flags);
4854}
4855
4856
4857Code::Kind Code::kind() {
4858  return ExtractKindFromFlags(flags());
4859}
4860
4861bool Code::IsCodeStubOrIC() {
4862  switch (kind()) {
4863    case STUB:
4864    case HANDLER:
4865#define CASE_KIND(kind) case kind:
4866      IC_KIND_LIST(CASE_KIND)
4867#undef CASE_KIND
4868      return true;
4869    default:
4870      return false;
4871  }
4872}
4873
4874ExtraICState Code::extra_ic_state() {
4875  DCHECK(is_binary_op_stub() || is_compare_ic_stub() ||
4876         is_to_boolean_ic_stub() || is_debug_stub());
4877  return ExtractExtraICStateFromFlags(flags());
4878}
4879
4880
4881// For initialization.
4882void Code::set_raw_kind_specific_flags1(int value) {
4883  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4884}
4885
4886
4887void Code::set_raw_kind_specific_flags2(int value) {
4888  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4889}
4890
4891
4892inline bool Code::is_crankshafted() {
4893  return IsCrankshaftedField::decode(
4894      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4895}
4896
4897
4898inline bool Code::is_hydrogen_stub() {
4899  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4900}
4901
4902inline bool Code::is_interpreter_trampoline_builtin() {
4903  Builtins* builtins = GetIsolate()->builtins();
4904  return this == *builtins->InterpreterEntryTrampoline() ||
4905         this == *builtins->InterpreterEnterBytecodeAdvance() ||
4906         this == *builtins->InterpreterEnterBytecodeDispatch();
4907}
4908
4909inline bool Code::has_unwinding_info() const {
4910  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
4911}
4912
4913inline void Code::set_has_unwinding_info(bool state) {
4914  uint32_t previous = READ_UINT32_FIELD(this, kFlagsOffset);
4915  uint32_t updated_value = HasUnwindingInfoField::update(previous, state);
4916  WRITE_UINT32_FIELD(this, kFlagsOffset, updated_value);
4917}
4918
4919inline void Code::set_is_crankshafted(bool value) {
4920  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4921  int updated = IsCrankshaftedField::update(previous, value);
4922  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4923}
4924
4925
4926inline bool Code::is_turbofanned() {
4927  return IsTurbofannedField::decode(
4928      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4929}
4930
4931
4932inline void Code::set_is_turbofanned(bool value) {
4933  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4934  int updated = IsTurbofannedField::update(previous, value);
4935  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4936}
4937
4938
4939inline bool Code::can_have_weak_objects() {
4940  DCHECK(kind() == OPTIMIZED_FUNCTION);
4941  return CanHaveWeakObjectsField::decode(
4942      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4943}
4944
4945
4946inline void Code::set_can_have_weak_objects(bool value) {
4947  DCHECK(kind() == OPTIMIZED_FUNCTION);
4948  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4949  int updated = CanHaveWeakObjectsField::update(previous, value);
4950  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4951}
4952
4953inline bool Code::is_construct_stub() {
4954  DCHECK(kind() == BUILTIN);
4955  return IsConstructStubField::decode(
4956      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4957}
4958
4959inline void Code::set_is_construct_stub(bool value) {
4960  DCHECK(kind() == BUILTIN);
4961  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4962  int updated = IsConstructStubField::update(previous, value);
4963  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4964}
4965
4966inline bool Code::is_promise_rejection() {
4967  DCHECK(kind() == BUILTIN);
4968  return IsPromiseRejectionField::decode(
4969      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4970}
4971
4972inline void Code::set_is_promise_rejection(bool value) {
4973  DCHECK(kind() == BUILTIN);
4974  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4975  int updated = IsPromiseRejectionField::update(previous, value);
4976  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4977}
4978
4979inline bool Code::is_exception_caught() {
4980  DCHECK(kind() == BUILTIN);
4981  return IsExceptionCaughtField::decode(
4982      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4983}
4984
4985inline void Code::set_is_exception_caught(bool value) {
4986  DCHECK(kind() == BUILTIN);
4987  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4988  int updated = IsExceptionCaughtField::update(previous, value);
4989  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4990}
4991
4992bool Code::has_deoptimization_support() {
4993  DCHECK_EQ(FUNCTION, kind());
4994  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4995  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4996}
4997
4998
4999void Code::set_has_deoptimization_support(bool value) {
5000  DCHECK_EQ(FUNCTION, kind());
5001  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5002  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
5003  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
5004}
5005
5006
5007bool Code::has_debug_break_slots() {
5008  DCHECK_EQ(FUNCTION, kind());
5009  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5010  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
5011}
5012
5013
5014void Code::set_has_debug_break_slots(bool value) {
5015  DCHECK_EQ(FUNCTION, kind());
5016  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5017  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
5018  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
5019}
5020
5021
5022bool Code::has_reloc_info_for_serialization() {
5023  DCHECK_EQ(FUNCTION, kind());
5024  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5025  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
5026}
5027
5028
5029void Code::set_has_reloc_info_for_serialization(bool value) {
5030  DCHECK_EQ(FUNCTION, kind());
5031  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5032  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
5033  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
5034}
5035
5036
5037int Code::allow_osr_at_loop_nesting_level() {
5038  DCHECK_EQ(FUNCTION, kind());
5039  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5040  return AllowOSRAtLoopNestingLevelField::decode(fields);
5041}
5042
5043
5044void Code::set_allow_osr_at_loop_nesting_level(int level) {
5045  DCHECK_EQ(FUNCTION, kind());
5046  DCHECK(level >= 0 && level <= AbstractCode::kMaxLoopNestingMarker);
5047  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5048  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
5049  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5050}
5051
5052
5053int Code::profiler_ticks() {
5054  DCHECK_EQ(FUNCTION, kind());
5055  return ProfilerTicksField::decode(
5056      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5057}
5058
5059
5060void Code::set_profiler_ticks(int ticks) {
5061  if (kind() == FUNCTION) {
5062    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5063    unsigned updated = ProfilerTicksField::update(previous, ticks);
5064    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5065  }
5066}
5067
5068int Code::builtin_index() { return READ_INT_FIELD(this, kBuiltinIndexOffset); }
5069
5070void Code::set_builtin_index(int index) {
5071  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
5072}
5073
5074
5075unsigned Code::stack_slots() {
5076  DCHECK(is_crankshafted());
5077  return StackSlotsField::decode(
5078      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5079}
5080
5081
5082void Code::set_stack_slots(unsigned slots) {
5083  CHECK(slots <= (1 << kStackSlotsBitCount));
5084  DCHECK(is_crankshafted());
5085  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5086  int updated = StackSlotsField::update(previous, slots);
5087  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5088}
5089
5090
5091unsigned Code::safepoint_table_offset() {
5092  DCHECK(is_crankshafted());
5093  return SafepointTableOffsetField::decode(
5094      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5095}
5096
5097
5098void Code::set_safepoint_table_offset(unsigned offset) {
5099  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5100  DCHECK(is_crankshafted());
5101  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5102  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5103  int updated = SafepointTableOffsetField::update(previous, offset);
5104  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5105}
5106
5107
5108unsigned Code::back_edge_table_offset() {
5109  DCHECK_EQ(FUNCTION, kind());
5110  return BackEdgeTableOffsetField::decode(
5111      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5112}
5113
5114
5115void Code::set_back_edge_table_offset(unsigned offset) {
5116  DCHECK_EQ(FUNCTION, kind());
5117  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5118  offset = offset >> kPointerSizeLog2;
5119  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5120  int updated = BackEdgeTableOffsetField::update(previous, offset);
5121  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5122}
5123
5124
5125bool Code::back_edges_patched_for_osr() {
5126  DCHECK_EQ(FUNCTION, kind());
5127  return allow_osr_at_loop_nesting_level() > 0;
5128}
5129
5130
5131uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5132
5133
5134bool Code::marked_for_deoptimization() {
5135  DCHECK(kind() == OPTIMIZED_FUNCTION);
5136  return MarkedForDeoptimizationField::decode(
5137      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5138}
5139
5140
5141void Code::set_marked_for_deoptimization(bool flag) {
5142  DCHECK(kind() == OPTIMIZED_FUNCTION);
5143  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5144  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5145  int updated = MarkedForDeoptimizationField::update(previous, flag);
5146  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5147}
5148
5149
5150bool Code::is_inline_cache_stub() {
5151  Kind kind = this->kind();
5152  switch (kind) {
5153#define CASE(name) case name: return true;
5154    IC_KIND_LIST(CASE)
5155#undef CASE
5156    default: return false;
5157  }
5158}
5159
5160bool Code::is_debug_stub() {
5161  if (kind() != BUILTIN) return false;
5162  switch (builtin_index()) {
5163#define CASE_DEBUG_BUILTIN(name) case Builtins::k##name:
5164    BUILTIN_LIST_DBG(CASE_DEBUG_BUILTIN)
5165#undef CASE_DEBUG_BUILTIN
5166      return true;
5167    default:
5168      return false;
5169  }
5170  return false;
5171}
5172bool Code::is_handler() { return kind() == HANDLER; }
5173bool Code::is_stub() { return kind() == STUB; }
5174bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
5175bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
5176bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
5177bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
5178bool Code::is_wasm_code() { return kind() == WASM_FUNCTION; }
5179
5180Address Code::constant_pool() {
5181  Address constant_pool = NULL;
5182  if (FLAG_enable_embedded_constant_pool) {
5183    int offset = constant_pool_offset();
5184    if (offset < instruction_size()) {
5185      constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5186    }
5187  }
5188  return constant_pool;
5189}
5190
5191Code::Flags Code::ComputeFlags(Kind kind, ExtraICState extra_ic_state,
5192                               CacheHolderFlag holder) {
5193  // Compute the bit mask.
5194  unsigned int bits = KindField::encode(kind) |
5195                      ExtraICStateField::encode(extra_ic_state) |
5196                      CacheHolderField::encode(holder);
5197  return static_cast<Flags>(bits);
5198}
5199
5200Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
5201                                      CacheHolderFlag holder) {
5202  return ComputeFlags(Code::HANDLER, handler_kind, holder);
5203}
5204
5205
5206Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5207  return KindField::decode(flags);
5208}
5209
5210
5211ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5212  return ExtraICStateField::decode(flags);
5213}
5214
5215
5216CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5217  return CacheHolderField::decode(flags);
5218}
5219
5220Code::Flags Code::RemoveHolderFromFlags(Flags flags) {
5221  int bits = flags & ~CacheHolderField::kMask;
5222  return static_cast<Flags>(bits);
5223}
5224
5225
5226Code* Code::GetCodeFromTargetAddress(Address address) {
5227  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5228  // GetCodeFromTargetAddress might be called when marking objects during mark
5229  // sweep. reinterpret_cast is therefore used instead of the more appropriate
5230  // Code::cast. Code::cast does not work when the object's map is
5231  // marked.
5232  Code* result = reinterpret_cast<Code*>(code);
5233  return result;
5234}
5235
5236
5237Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5238  return HeapObject::
5239      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5240}
5241
5242
5243bool Code::CanContainWeakObjects() {
5244  return is_optimized_code() && can_have_weak_objects();
5245}
5246
5247
5248bool Code::IsWeakObject(Object* object) {
5249  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
5250}
5251
5252
5253bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5254  if (object->IsMap()) {
5255    return Map::cast(object)->CanTransition();
5256  }
5257  if (object->IsCell()) {
5258    object = Cell::cast(object)->value();
5259  } else if (object->IsPropertyCell()) {
5260    object = PropertyCell::cast(object)->value();
5261  }
5262  if (object->IsJSReceiver() || object->IsContext()) {
5263    return true;
5264  }
5265  return false;
5266}
5267
5268
5269int AbstractCode::instruction_size() {
5270  if (IsCode()) {
5271    return GetCode()->instruction_size();
5272  } else {
5273    return GetBytecodeArray()->length();
5274  }
5275}
5276
5277ByteArray* AbstractCode::source_position_table() {
5278  if (IsCode()) {
5279    return GetCode()->source_position_table();
5280  } else {
5281    return GetBytecodeArray()->source_position_table();
5282  }
5283}
5284
5285void AbstractCode::set_source_position_table(ByteArray* source_position_table) {
5286  if (IsCode()) {
5287    GetCode()->set_source_position_table(source_position_table);
5288  } else {
5289    GetBytecodeArray()->set_source_position_table(source_position_table);
5290  }
5291}
5292
5293int AbstractCode::SizeIncludingMetadata() {
5294  if (IsCode()) {
5295    return GetCode()->SizeIncludingMetadata();
5296  } else {
5297    return GetBytecodeArray()->SizeIncludingMetadata();
5298  }
5299}
5300int AbstractCode::ExecutableSize() {
5301  if (IsCode()) {
5302    return GetCode()->ExecutableSize();
5303  } else {
5304    return GetBytecodeArray()->BytecodeArraySize();
5305  }
5306}
5307
5308Address AbstractCode::instruction_start() {
5309  if (IsCode()) {
5310    return GetCode()->instruction_start();
5311  } else {
5312    return GetBytecodeArray()->GetFirstBytecodeAddress();
5313  }
5314}
5315
5316Address AbstractCode::instruction_end() {
5317  if (IsCode()) {
5318    return GetCode()->instruction_end();
5319  } else {
5320    return GetBytecodeArray()->GetFirstBytecodeAddress() +
5321           GetBytecodeArray()->length();
5322  }
5323}
5324
5325bool AbstractCode::contains(byte* inner_pointer) {
5326  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
5327}
5328
5329AbstractCode::Kind AbstractCode::kind() {
5330  if (IsCode()) {
5331    STATIC_ASSERT(AbstractCode::FUNCTION ==
5332                  static_cast<AbstractCode::Kind>(Code::FUNCTION));
5333    return static_cast<AbstractCode::Kind>(GetCode()->kind());
5334  } else {
5335    return INTERPRETED_FUNCTION;
5336  }
5337}
5338
5339Code* AbstractCode::GetCode() { return Code::cast(this); }
5340
5341BytecodeArray* AbstractCode::GetBytecodeArray() {
5342  return BytecodeArray::cast(this);
5343}
5344
5345Object* Map::prototype() const {
5346  return READ_FIELD(this, kPrototypeOffset);
5347}
5348
5349
5350void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5351  DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
5352  WRITE_FIELD(this, kPrototypeOffset, value);
5353  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5354}
5355
5356
5357LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5358  Object* layout_desc = READ_FIELD(this, kLayoutDescriptorOffset);
5359  return LayoutDescriptor::cast_gc_safe(layout_desc);
5360}
5361
5362
5363bool Map::HasFastPointerLayout() const {
5364  Object* layout_desc = READ_FIELD(this, kLayoutDescriptorOffset);
5365  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
5366}
5367
5368
5369void Map::UpdateDescriptors(DescriptorArray* descriptors,
5370                            LayoutDescriptor* layout_desc) {
5371  set_instance_descriptors(descriptors);
5372  if (FLAG_unbox_double_fields) {
5373    if (layout_descriptor()->IsSlowLayout()) {
5374      set_layout_descriptor(layout_desc);
5375    }
5376#ifdef VERIFY_HEAP
5377    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5378    if (FLAG_verify_heap) {
5379      CHECK(layout_descriptor()->IsConsistentWithMap(this));
5380      CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
5381    }
5382#else
5383    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5384    DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
5385#endif
5386  }
5387}
5388
5389
5390void Map::InitializeDescriptors(DescriptorArray* descriptors,
5391                                LayoutDescriptor* layout_desc) {
5392  int len = descriptors->number_of_descriptors();
5393  set_instance_descriptors(descriptors);
5394  SetNumberOfOwnDescriptors(len);
5395
5396  if (FLAG_unbox_double_fields) {
5397    set_layout_descriptor(layout_desc);
5398#ifdef VERIFY_HEAP
5399    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5400    if (FLAG_verify_heap) {
5401      CHECK(layout_descriptor()->IsConsistentWithMap(this));
5402    }
5403#else
5404    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5405#endif
5406    set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
5407  }
5408}
5409
5410
5411ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5412ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDescriptorOffset)
5413
5414void Map::set_bit_field3(uint32_t bits) {
5415  if (kInt32Size != kPointerSize) {
5416    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5417  }
5418  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5419}
5420
5421
5422uint32_t Map::bit_field3() const {
5423  return READ_UINT32_FIELD(this, kBitField3Offset);
5424}
5425
5426
5427LayoutDescriptor* Map::GetLayoutDescriptor() {
5428  return FLAG_unbox_double_fields ? layout_descriptor()
5429                                  : LayoutDescriptor::FastPointerLayout();
5430}
5431
5432
5433void Map::AppendDescriptor(Descriptor* desc) {
5434  DescriptorArray* descriptors = instance_descriptors();
5435  int number_of_own_descriptors = NumberOfOwnDescriptors();
5436  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5437  descriptors->Append(desc);
5438  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5439
5440// This function does not support appending double field descriptors and
5441// it should never try to (otherwise, layout descriptor must be updated too).
5442#ifdef DEBUG
5443  PropertyDetails details = desc->GetDetails();
5444  CHECK(details.location() != kField || !details.representation().IsDouble());
5445#endif
5446}
5447
5448
5449Object* Map::GetBackPointer() {
5450  Object* object = constructor_or_backpointer();
5451  if (object->IsMap()) {
5452    return object;
5453  }
5454  return GetIsolate()->heap()->undefined_value();
5455}
5456
5457
5458Map* Map::ElementsTransitionMap() {
5459  return TransitionArray::SearchSpecial(
5460      this, GetHeap()->elements_transition_symbol());
5461}
5462
5463
5464ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5465
5466
5467Object* Map::prototype_info() const {
5468  DCHECK(is_prototype_map());
5469  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
5470}
5471
5472
5473void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
5474  DCHECK(is_prototype_map());
5475  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
5476  CONDITIONAL_WRITE_BARRIER(
5477      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
5478}
5479
5480
5481void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5482  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5483  DCHECK(value->IsMap());
5484  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
5485  DCHECK(!value->IsMap() ||
5486         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
5487  set_constructor_or_backpointer(value, mode);
5488}
5489
5490ACCESSORS(Map, code_cache, FixedArray, kCodeCacheOffset)
5491ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5492ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
5493ACCESSORS(Map, constructor_or_backpointer, Object,
5494          kConstructorOrBackPointerOffset)
5495
5496
5497Object* Map::GetConstructor() const {
5498  Object* maybe_constructor = constructor_or_backpointer();
5499  // Follow any back pointers.
5500  while (maybe_constructor->IsMap()) {
5501    maybe_constructor =
5502        Map::cast(maybe_constructor)->constructor_or_backpointer();
5503  }
5504  return maybe_constructor;
5505}
5506
5507
5508void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5509  // Never overwrite a back pointer with a constructor.
5510  DCHECK(!constructor_or_backpointer()->IsMap());
5511  set_constructor_or_backpointer(constructor, mode);
5512}
5513
5514
5515Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
5516  return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
5517                        map->unused_property_fields());
5518}
5519
5520
5521ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
5522          kBoundTargetFunctionOffset)
5523ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
5524ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)
5525
5526ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5527ACCESSORS(JSFunction, feedback_vector_cell, Cell, kFeedbackVectorOffset)
5528ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5529
5530ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
5531ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5532
5533ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5534ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5535
5536ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5537SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
5538ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5539          kExpectedReceiverTypeOffset)
5540
5541ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
5542ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
5543ACCESSORS(AccessorInfo, js_getter, Object, kJsGetterOffset)
5544ACCESSORS(AccessorInfo, data, Object, kDataOffset)
5545
5546ACCESSORS(PromiseResolveThenableJobInfo, thenable, JSReceiver, kThenableOffset)
5547ACCESSORS(PromiseResolveThenableJobInfo, then, JSReceiver, kThenOffset)
5548ACCESSORS(PromiseResolveThenableJobInfo, resolve, JSFunction, kResolveOffset)
5549ACCESSORS(PromiseResolveThenableJobInfo, reject, JSFunction, kRejectOffset)
5550ACCESSORS(PromiseResolveThenableJobInfo, context, Context, kContextOffset);
5551
5552ACCESSORS(PromiseReactionJobInfo, value, Object, kValueOffset);
5553ACCESSORS(PromiseReactionJobInfo, tasks, Object, kTasksOffset);
5554ACCESSORS(PromiseReactionJobInfo, deferred_promise, Object,
5555          kDeferredPromiseOffset);
5556ACCESSORS(PromiseReactionJobInfo, deferred_on_resolve, Object,
5557          kDeferredOnResolveOffset);
5558ACCESSORS(PromiseReactionJobInfo, deferred_on_reject, Object,
5559          kDeferredOnRejectOffset);
5560ACCESSORS(PromiseReactionJobInfo, context, Context, kContextOffset);
5561
5562Map* PrototypeInfo::ObjectCreateMap() {
5563  return Map::cast(WeakCell::cast(object_create_map())->value());
5564}
5565
5566// static
5567void PrototypeInfo::SetObjectCreateMap(Handle<PrototypeInfo> info,
5568                                       Handle<Map> map) {
5569  Handle<WeakCell> cell = Map::WeakCellForMap(map);
5570  info->set_object_create_map(*cell);
5571}
5572
5573bool PrototypeInfo::HasObjectCreateMap() {
5574  Object* cache = object_create_map();
5575  return cache->IsWeakCell() && !WeakCell::cast(cache)->cleared();
5576}
5577
5578bool FunctionTemplateInfo::instantiated() {
5579  return shared_function_info()->IsSharedFunctionInfo();
5580}
5581
5582FunctionTemplateInfo* FunctionTemplateInfo::GetParent(Isolate* isolate) {
5583  Object* parent = parent_template();
5584  return parent->IsUndefined(isolate) ? nullptr
5585                                      : FunctionTemplateInfo::cast(parent);
5586}
5587
5588ObjectTemplateInfo* ObjectTemplateInfo::GetParent(Isolate* isolate) {
5589  Object* maybe_ctor = constructor();
5590  if (maybe_ctor->IsUndefined(isolate)) return nullptr;
5591  FunctionTemplateInfo* constructor = FunctionTemplateInfo::cast(maybe_ctor);
5592  while (true) {
5593    constructor = constructor->GetParent(isolate);
5594    if (constructor == nullptr) return nullptr;
5595    Object* maybe_obj = constructor->instance_template();
5596    if (!maybe_obj->IsUndefined(isolate)) {
5597      return ObjectTemplateInfo::cast(maybe_obj);
5598    }
5599  }
5600  return nullptr;
5601}
5602
5603ACCESSORS(PrototypeInfo, weak_cell, Object, kWeakCellOffset)
5604ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
5605ACCESSORS(PrototypeInfo, object_create_map, Object, kObjectCreateMap)
5606SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
5607ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
5608SMI_ACCESSORS(PrototypeInfo, bit_field, kBitFieldOffset)
5609BOOL_ACCESSORS(PrototypeInfo, bit_field, should_be_fast_map, kShouldBeFastBit)
5610
5611ACCESSORS(Tuple2, value1, Object, kValue1Offset)
5612ACCESSORS(Tuple2, value2, Object, kValue2Offset)
5613ACCESSORS(Tuple3, value3, Object, kValue3Offset)
5614
5615ACCESSORS(ContextExtension, scope_info, ScopeInfo, kScopeInfoOffset)
5616ACCESSORS(ContextExtension, extension, Object, kExtensionOffset)
5617
5618SMI_ACCESSORS(ConstantElementsPair, elements_kind, kElementsKindOffset)
5619ACCESSORS(ConstantElementsPair, constant_values, FixedArrayBase,
5620          kConstantValuesOffset)
5621
5622ACCESSORS(JSModuleNamespace, module, Module, kModuleOffset)
5623
5624ACCESSORS(Module, code, Object, kCodeOffset)
5625ACCESSORS(Module, exports, ObjectHashTable, kExportsOffset)
5626ACCESSORS(Module, regular_exports, FixedArray, kRegularExportsOffset)
5627ACCESSORS(Module, regular_imports, FixedArray, kRegularImportsOffset)
5628ACCESSORS(Module, module_namespace, HeapObject, kModuleNamespaceOffset)
5629ACCESSORS(Module, requested_modules, FixedArray, kRequestedModulesOffset)
5630SMI_ACCESSORS(Module, hash, kHashOffset)
5631
5632bool Module::evaluated() const { return code()->IsModuleInfo(); }
5633
5634void Module::set_evaluated() {
5635  DCHECK(instantiated());
5636  DCHECK(!evaluated());
5637  return set_code(
5638      JSFunction::cast(code())->shared()->scope_info()->ModuleDescriptorInfo());
5639}
5640
5641bool Module::instantiated() const { return !code()->IsSharedFunctionInfo(); }
5642
5643ModuleInfo* Module::info() const {
5644  if (evaluated()) return ModuleInfo::cast(code());
5645  ScopeInfo* scope_info = instantiated()
5646                              ? JSFunction::cast(code())->shared()->scope_info()
5647                              : SharedFunctionInfo::cast(code())->scope_info();
5648  return scope_info->ModuleDescriptorInfo();
5649}
5650
5651ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5652ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5653
5654ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
5655ACCESSORS(AccessCheckInfo, named_interceptor, Object, kNamedInterceptorOffset)
5656ACCESSORS(AccessCheckInfo, indexed_interceptor, Object,
5657          kIndexedInterceptorOffset)
5658ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5659
5660ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5661ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5662ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5663ACCESSORS(InterceptorInfo, descriptor, Object, kDescriptorOffset)
5664ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5665ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5666ACCESSORS(InterceptorInfo, definer, Object, kDefinerOffset)
5667ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5668SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
5669BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
5670               kCanInterceptSymbolsBit)
5671BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
5672BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
5673
5674ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5675ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5676ACCESSORS(CallHandlerInfo, fast_handler, Object, kFastHandlerOffset)
5677
5678ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5679ACCESSORS(TemplateInfo, serial_number, Object, kSerialNumberOffset)
5680SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
5681ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5682ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5683
5684ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5685ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5686          kPrototypeTemplateOffset)
5687ACCESSORS(FunctionTemplateInfo, prototype_provider_template, Object,
5688          kPrototypeProviderTemplateOffset)
5689
5690ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5691ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5692          kNamedPropertyHandlerOffset)
5693ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5694          kIndexedPropertyHandlerOffset)
5695ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5696          kInstanceTemplateOffset)
5697ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5698ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5699ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5700          kInstanceCallHandlerOffset)
5701ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5702          kAccessCheckInfoOffset)
5703ACCESSORS(FunctionTemplateInfo, shared_function_info, Object,
5704          kSharedFunctionInfoOffset)
5705ACCESSORS(FunctionTemplateInfo, cached_property_name, Object,
5706          kCachedPropertyNameOffset)
5707
5708SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)
5709
5710ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5711ACCESSORS(ObjectTemplateInfo, data, Object, kDataOffset)
5712
5713int ObjectTemplateInfo::internal_field_count() const {
5714  Object* value = data();
5715  DCHECK(value->IsSmi());
5716  return InternalFieldCount::decode(Smi::cast(value)->value());
5717}
5718
5719void ObjectTemplateInfo::set_internal_field_count(int count) {
5720  return set_data(Smi::FromInt(
5721      InternalFieldCount::update(Smi::cast(data())->value(), count)));
5722}
5723
5724bool ObjectTemplateInfo::immutable_proto() const {
5725  Object* value = data();
5726  DCHECK(value->IsSmi());
5727  return IsImmutablePrototype::decode(Smi::cast(value)->value());
5728}
5729
5730void ObjectTemplateInfo::set_immutable_proto(bool immutable) {
5731  return set_data(Smi::FromInt(
5732      IsImmutablePrototype::update(Smi::cast(data())->value(), immutable)));
5733}
5734
5735int TemplateList::length() const {
5736  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
5737}
5738
5739Object* TemplateList::get(int index) const {
5740  return FixedArray::cast(this)->get(kFirstElementIndex + index);
5741}
5742
5743void TemplateList::set(int index, Object* value) {
5744  FixedArray::cast(this)->set(kFirstElementIndex + index, value);
5745}
5746
5747ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5748ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5749SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
5750SMI_ACCESSORS(AllocationSite, pretenure_create_count,
5751              kPretenureCreateCountOffset)
5752ACCESSORS(AllocationSite, dependent_code, DependentCode,
5753          kDependentCodeOffset)
5754ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5755ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5756
5757ACCESSORS(Script, source, Object, kSourceOffset)
5758ACCESSORS(Script, name, Object, kNameOffset)
5759SMI_ACCESSORS(Script, id, kIdOffset)
5760SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
5761SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
5762ACCESSORS(Script, context_data, Object, kContextOffset)
5763ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5764SMI_ACCESSORS(Script, type, kTypeOffset)
5765ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5766ACCESSORS_CHECKED(Script, eval_from_shared, Object, kEvalFromSharedOffset,
5767                  this->type() != TYPE_WASM)
5768SMI_ACCESSORS_CHECKED(Script, eval_from_position, kEvalFromPositionOffset,
5769                      this->type() != TYPE_WASM)
5770ACCESSORS(Script, shared_function_infos, FixedArray, kSharedFunctionInfosOffset)
5771SMI_ACCESSORS(Script, flags, kFlagsOffset)
5772ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5773ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5774ACCESSORS_CHECKED(Script, wasm_compiled_module, Object, kEvalFromSharedOffset,
5775                  this->type() == TYPE_WASM)
5776
5777Script::CompilationType Script::compilation_type() {
5778  return BooleanBit::get(flags(), kCompilationTypeBit) ?
5779      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5780}
5781void Script::set_compilation_type(CompilationType type) {
5782  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5783      type == COMPILATION_TYPE_EVAL));
5784}
5785Script::CompilationState Script::compilation_state() {
5786  return BooleanBit::get(flags(), kCompilationStateBit) ?
5787      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5788}
5789void Script::set_compilation_state(CompilationState state) {
5790  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5791      state == COMPILATION_STATE_COMPILED));
5792}
5793ScriptOriginOptions Script::origin_options() {
5794  return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
5795                             kOriginOptionsShift);
5796}
5797void Script::set_origin_options(ScriptOriginOptions origin_options) {
5798  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
5799  set_flags((flags() & ~kOriginOptionsMask) |
5800            (origin_options.Flags() << kOriginOptionsShift));
5801}
5802
5803
5804ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5805SMI_ACCESSORS(DebugInfo, debugger_hints, kDebuggerHintsIndex)
5806ACCESSORS(DebugInfo, debug_bytecode_array, Object, kDebugBytecodeArrayIndex)
5807ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5808
5809bool DebugInfo::HasDebugBytecodeArray() {
5810  return debug_bytecode_array()->IsBytecodeArray();
5811}
5812
5813bool DebugInfo::HasDebugCode() {
5814  Code* code = shared()->code();
5815  bool has = code->kind() == Code::FUNCTION;
5816  DCHECK(!has || code->has_debug_break_slots());
5817  return has;
5818}
5819
5820BytecodeArray* DebugInfo::OriginalBytecodeArray() {
5821  DCHECK(HasDebugBytecodeArray());
5822  return shared()->bytecode_array();
5823}
5824
5825BytecodeArray* DebugInfo::DebugBytecodeArray() {
5826  DCHECK(HasDebugBytecodeArray());
5827  return BytecodeArray::cast(debug_bytecode_array());
5828}
5829
5830Code* DebugInfo::DebugCode() {
5831  DCHECK(HasDebugCode());
5832  return shared()->code();
5833}
5834
5835SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
5836ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5837
5838ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5839ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
5840          kOptimizedCodeMapOffset)
5841ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5842ACCESSORS(SharedFunctionInfo, feedback_metadata, FeedbackMetadata,
5843          kFeedbackMetadataOffset)
5844SMI_ACCESSORS(SharedFunctionInfo, function_literal_id, kFunctionLiteralIdOffset)
5845#if TRACE_MAPS
5846SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
5847#endif
5848ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5849          kInstanceClassNameOffset)
5850ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5851ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5852ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5853ACCESSORS(SharedFunctionInfo, function_identifier, Object,
5854          kFunctionIdentifierOffset)
5855
5856SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5857BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5858               kHiddenPrototypeBit)
5859BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5860BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5861               kNeedsAccessCheckBit)
5862BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5863               kReadOnlyPrototypeBit)
5864BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5865               kRemovePrototypeBit)
5866BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5867               kDoNotCacheBit)
5868BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
5869               kAcceptAnyReceiver)
5870BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_named_expression,
5871               kIsNamedExpressionBit)
5872BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5873               kIsTopLevelBit)
5874
5875#if V8_HOST_ARCH_32_BIT
5876SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5877SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
5878              kFormalParameterCountOffset)
5879SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5880              kExpectedNofPropertiesOffset)
5881SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5882              kStartPositionAndTypeOffset)
5883SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5884SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5885              kFunctionTokenPositionOffset)
5886SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5887              kCompilerHintsOffset)
5888SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5889              kOptCountAndBailoutReasonOffset)
5890SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5891SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5892SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5893
5894#else
5895
5896#if V8_TARGET_LITTLE_ENDIAN
5897#define PSEUDO_SMI_LO_ALIGN 0
5898#define PSEUDO_SMI_HI_ALIGN kIntSize
5899#else
5900#define PSEUDO_SMI_LO_ALIGN kIntSize
5901#define PSEUDO_SMI_HI_ALIGN 0
5902#endif
5903
5904#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
5905  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
5906  int holder::name() const {                                                   \
5907    int value = READ_INT_FIELD(this, offset);                                  \
5908    DCHECK(kHeapObjectTag == 1);                                               \
5909    DCHECK((value & kHeapObjectTag) == 0);                                     \
5910    return value >> 1;                                                         \
5911  }                                                                            \
5912  void holder::set_##name(int value) {                                         \
5913    DCHECK(kHeapObjectTag == 1);                                               \
5914    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5915    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
5916  }
5917
5918#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)                  \
5919  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5920  INT_ACCESSORS(holder, name, offset)
5921
5922
5923PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5924PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5925                        kFormalParameterCountOffset)
5926
5927PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5928                        expected_nof_properties,
5929                        kExpectedNofPropertiesOffset)
5930
5931PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5932PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5933                        start_position_and_type,
5934                        kStartPositionAndTypeOffset)
5935
5936PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5937                        function_token_position,
5938                        kFunctionTokenPositionOffset)
5939PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5940                        compiler_hints,
5941                        kCompilerHintsOffset)
5942
5943PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5944                        opt_count_and_bailout_reason,
5945                        kOptCountAndBailoutReasonOffset)
5946PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5947
5948PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5949                        ast_node_count,
5950                        kAstNodeCountOffset)
5951PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5952                        profiler_ticks,
5953                        kProfilerTicksOffset)
5954
5955#endif
5956
5957AbstractCode* SharedFunctionInfo::abstract_code() {
5958  if (HasBytecodeArray()) {
5959    return AbstractCode::cast(bytecode_array());
5960  } else {
5961    return AbstractCode::cast(code());
5962  }
5963}
5964
5965BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
5966               kAllowLazyCompilation)
5967BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_arguments,
5968               kUsesArguments)
5969BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, has_duplicate_parameters,
5970               kHasDuplicateParameters)
5971BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5972BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_declaration,
5973               kIsDeclaration)
5974BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, marked_for_tier_up,
5975               kMarkedForTierUp)
5976
5977BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
5978               kNeedsHomeObject)
5979BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5980BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
5981BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, must_use_ignition_turbo,
5982               kMustUseIgnitionTurbo)
5983BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5984BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_asm_wasm_broken,
5985               kIsAsmWasmBroken)
5986
5987BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled,
5988            kOptimizationDisabled)
5989
5990void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5991  set_compiler_hints(BooleanBit::set(compiler_hints(),
5992                                     kOptimizationDisabled,
5993                                     disable));
5994}
5995
5996LanguageMode SharedFunctionInfo::language_mode() {
5997  STATIC_ASSERT(LANGUAGE_END == 2);
5998  return construct_language_mode(
5999      BooleanBit::get(compiler_hints(), kStrictModeFunction));
6000}
6001
6002void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
6003  STATIC_ASSERT(LANGUAGE_END == 2);
6004  // We only allow language mode transitions that set the same language mode
6005  // again or go up in the chain:
6006  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
6007  int hints = compiler_hints();
6008  hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
6009  set_compiler_hints(hints);
6010}
6011
6012FunctionKind SharedFunctionInfo::kind() const {
6013  return FunctionKindBits::decode(compiler_hints());
6014}
6015
6016void SharedFunctionInfo::set_kind(FunctionKind kind) {
6017  DCHECK(IsValidFunctionKind(kind));
6018  int hints = compiler_hints();
6019  hints = FunctionKindBits::update(hints, kind);
6020  set_compiler_hints(hints);
6021}
6022
6023BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints,
6024               name_should_print_as_anonymous, kNameShouldPrintAsAnonymous)
6025BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, is_anonymous_expression,
6026               kIsAnonymousExpression)
6027BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, deserialized, kDeserialized)
6028BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, has_no_side_effect,
6029               kHasNoSideEffect)
6030BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, computed_has_no_side_effect,
6031               kComputedHasNoSideEffect)
6032BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, debug_is_blackboxed,
6033               kDebugIsBlackboxed)
6034BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, computed_debug_is_blackboxed,
6035               kComputedDebugIsBlackboxed)
6036
6037bool Script::HasValidSource() {
6038  Object* src = this->source();
6039  if (!src->IsString()) return true;
6040  String* src_str = String::cast(src);
6041  if (!StringShape(src_str).IsExternal()) return true;
6042  if (src_str->IsOneByteRepresentation()) {
6043    return ExternalOneByteString::cast(src)->resource() != NULL;
6044  } else if (src_str->IsTwoByteRepresentation()) {
6045    return ExternalTwoByteString::cast(src)->resource() != NULL;
6046  }
6047  return true;
6048}
6049
6050
6051void SharedFunctionInfo::DontAdaptArguments() {
6052  DCHECK(code()->kind() == Code::BUILTIN || code()->kind() == Code::STUB);
6053  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
6054}
6055
6056
6057int SharedFunctionInfo::start_position() const {
6058  return start_position_and_type() >> kStartPositionShift;
6059}
6060
6061
6062void SharedFunctionInfo::set_start_position(int start_position) {
6063  set_start_position_and_type((start_position << kStartPositionShift)
6064    | (start_position_and_type() & ~kStartPositionMask));
6065}
6066
6067
6068Code* SharedFunctionInfo::code() const {
6069  return Code::cast(READ_FIELD(this, kCodeOffset));
6070}
6071
6072
6073void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
6074  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
6075  // If the SharedFunctionInfo has bytecode we should never mark it for lazy
6076  // compile, since the bytecode is never flushed.
6077  DCHECK(value != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy) ||
6078         !HasBytecodeArray());
6079  WRITE_FIELD(this, kCodeOffset, value);
6080  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
6081}
6082
6083
6084void SharedFunctionInfo::ReplaceCode(Code* value) {
6085  // If the GC metadata field is already used then the function was
6086  // enqueued as a code flushing candidate and we remove it now.
6087  if (code()->gc_metadata() != NULL) {
6088    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
6089    flusher->EvictCandidate(this);
6090  }
6091
6092  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
6093#ifdef DEBUG
6094  Code::VerifyRecompiledCode(code(), value);
6095#endif  // DEBUG
6096
6097  set_code(value);
6098}
6099
6100bool SharedFunctionInfo::IsInterpreted() const {
6101  return code()->is_interpreter_trampoline_builtin();
6102}
6103
6104bool SharedFunctionInfo::HasBaselineCode() const {
6105  return code()->kind() == Code::FUNCTION;
6106}
6107
6108ScopeInfo* SharedFunctionInfo::scope_info() const {
6109  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
6110}
6111
6112
6113void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
6114                                        WriteBarrierMode mode) {
6115  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
6116  CONDITIONAL_WRITE_BARRIER(GetHeap(),
6117                            this,
6118                            kScopeInfoOffset,
6119                            reinterpret_cast<Object*>(value),
6120                            mode);
6121}
6122
6123ACCESSORS(SharedFunctionInfo, outer_scope_info, HeapObject,
6124          kOuterScopeInfoOffset)
6125
6126bool SharedFunctionInfo::is_compiled() const {
6127  Builtins* builtins = GetIsolate()->builtins();
6128  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
6129  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
6130  DCHECK(code() != builtins->builtin(Builtins::kCompileBaseline));
6131  return code() != builtins->builtin(Builtins::kCompileLazy);
6132}
6133
6134
6135bool SharedFunctionInfo::has_simple_parameters() {
6136  return scope_info()->HasSimpleParameters();
6137}
6138
6139bool SharedFunctionInfo::HasDebugInfo() const {
6140  bool has_debug_info = !debug_info()->IsSmi();
6141  DCHECK_EQ(debug_info()->IsStruct(), has_debug_info);
6142  DCHECK(!has_debug_info || HasDebugCode());
6143  return has_debug_info;
6144}
6145
6146DebugInfo* SharedFunctionInfo::GetDebugInfo() const {
6147  DCHECK(HasDebugInfo());
6148  return DebugInfo::cast(debug_info());
6149}
6150
6151bool SharedFunctionInfo::HasDebugCode() const {
6152  if (HasBaselineCode()) return code()->has_debug_break_slots();
6153  return HasBytecodeArray();
6154}
6155
6156int SharedFunctionInfo::debugger_hints() const {
6157  if (HasDebugInfo()) return GetDebugInfo()->debugger_hints();
6158  return Smi::cast(debug_info())->value();
6159}
6160
6161void SharedFunctionInfo::set_debugger_hints(int value) {
6162  if (HasDebugInfo()) {
6163    GetDebugInfo()->set_debugger_hints(value);
6164  } else {
6165    set_debug_info(Smi::FromInt(value));
6166  }
6167}
6168
6169bool SharedFunctionInfo::IsApiFunction() {
6170  return function_data()->IsFunctionTemplateInfo();
6171}
6172
6173
6174FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
6175  DCHECK(IsApiFunction());
6176  return FunctionTemplateInfo::cast(function_data());
6177}
6178
6179void SharedFunctionInfo::set_api_func_data(FunctionTemplateInfo* data) {
6180  DCHECK(function_data()->IsUndefined(GetIsolate()));
6181  set_function_data(data);
6182}
6183
6184bool SharedFunctionInfo::HasBytecodeArray() const {
6185  return function_data()->IsBytecodeArray();
6186}
6187
6188BytecodeArray* SharedFunctionInfo::bytecode_array() const {
6189  DCHECK(HasBytecodeArray());
6190  return BytecodeArray::cast(function_data());
6191}
6192
6193void SharedFunctionInfo::set_bytecode_array(BytecodeArray* bytecode) {
6194  DCHECK(function_data()->IsUndefined(GetIsolate()));
6195  set_function_data(bytecode);
6196}
6197
6198void SharedFunctionInfo::ClearBytecodeArray() {
6199  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasBytecodeArray());
6200  set_function_data(GetHeap()->undefined_value());
6201}
6202
6203bool SharedFunctionInfo::HasAsmWasmData() const {
6204  return function_data()->IsFixedArray();
6205}
6206
6207FixedArray* SharedFunctionInfo::asm_wasm_data() const {
6208  DCHECK(HasAsmWasmData());
6209  return FixedArray::cast(function_data());
6210}
6211
6212void SharedFunctionInfo::set_asm_wasm_data(FixedArray* data) {
6213  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
6214  set_function_data(data);
6215}
6216
6217void SharedFunctionInfo::ClearAsmWasmData() {
6218  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
6219  set_function_data(GetHeap()->undefined_value());
6220}
6221
6222bool SharedFunctionInfo::HasBuiltinFunctionId() {
6223  return function_identifier()->IsSmi();
6224}
6225
6226BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
6227  DCHECK(HasBuiltinFunctionId());
6228  return static_cast<BuiltinFunctionId>(
6229      Smi::cast(function_identifier())->value());
6230}
6231
6232void SharedFunctionInfo::set_builtin_function_id(BuiltinFunctionId id) {
6233  set_function_identifier(Smi::FromInt(id));
6234}
6235
6236bool SharedFunctionInfo::HasInferredName() {
6237  return function_identifier()->IsString();
6238}
6239
6240String* SharedFunctionInfo::inferred_name() {
6241  if (HasInferredName()) {
6242    return String::cast(function_identifier());
6243  }
6244  Isolate* isolate = GetIsolate();
6245  DCHECK(function_identifier()->IsUndefined(isolate) || HasBuiltinFunctionId());
6246  return isolate->heap()->empty_string();
6247}
6248
6249void SharedFunctionInfo::set_inferred_name(String* inferred_name) {
6250  DCHECK(function_identifier()->IsUndefined(GetIsolate()) || HasInferredName());
6251  set_function_identifier(inferred_name);
6252}
6253
6254int SharedFunctionInfo::ic_age() {
6255  return ICAgeBits::decode(counters());
6256}
6257
6258
6259void SharedFunctionInfo::set_ic_age(int ic_age) {
6260  set_counters(ICAgeBits::update(counters(), ic_age));
6261}
6262
6263
6264int SharedFunctionInfo::deopt_count() {
6265  return DeoptCountBits::decode(counters());
6266}
6267
6268
6269void SharedFunctionInfo::set_deopt_count(int deopt_count) {
6270  set_counters(DeoptCountBits::update(counters(), deopt_count));
6271}
6272
6273
6274void SharedFunctionInfo::increment_deopt_count() {
6275  int value = counters();
6276  int deopt_count = DeoptCountBits::decode(value);
6277  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6278  set_counters(DeoptCountBits::update(value, deopt_count));
6279}
6280
6281
6282int SharedFunctionInfo::opt_reenable_tries() {
6283  return OptReenableTriesBits::decode(counters());
6284}
6285
6286
6287void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6288  set_counters(OptReenableTriesBits::update(counters(), tries));
6289}
6290
6291
6292int SharedFunctionInfo::opt_count() {
6293  return OptCountBits::decode(opt_count_and_bailout_reason());
6294}
6295
6296
6297void SharedFunctionInfo::set_opt_count(int opt_count) {
6298  set_opt_count_and_bailout_reason(
6299      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6300}
6301
6302
6303BailoutReason SharedFunctionInfo::disable_optimization_reason() {
6304  return static_cast<BailoutReason>(
6305      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6306}
6307
6308
6309bool SharedFunctionInfo::has_deoptimization_support() {
6310  Code* code = this->code();
6311  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6312}
6313
6314
6315void SharedFunctionInfo::TryReenableOptimization() {
6316  int tries = opt_reenable_tries();
6317  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6318  // We reenable optimization whenever the number of tries is a large
6319  // enough power of 2.
6320  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6321    set_optimization_disabled(false);
6322    set_opt_count(0);
6323    set_deopt_count(0);
6324  }
6325}
6326
6327
6328void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
6329  set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
6330      opt_count_and_bailout_reason(), reason));
6331}
6332
6333bool SharedFunctionInfo::IsUserJavaScript() {
6334  Object* script_obj = script();
6335  if (script_obj->IsUndefined(GetIsolate())) return false;
6336  Script* script = Script::cast(script_obj);
6337  return static_cast<Script::Type>(script->type()) == Script::TYPE_NORMAL;
6338}
6339
6340bool SharedFunctionInfo::IsSubjectToDebugging() {
6341  return IsUserJavaScript() && !HasAsmWasmData();
6342}
6343
6344bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
6345  return optimized_code_map() == GetHeap()->empty_fixed_array();
6346}
6347
6348FeedbackVector* JSFunction::feedback_vector() const {
6349  DCHECK(feedback_vector_cell()->value()->IsFeedbackVector());
6350  return FeedbackVector::cast(feedback_vector_cell()->value());
6351}
6352
6353bool JSFunction::IsOptimized() {
6354  return code()->kind() == Code::OPTIMIZED_FUNCTION;
6355}
6356
6357bool JSFunction::IsInterpreted() {
6358  return code()->is_interpreter_trampoline_builtin();
6359}
6360
6361bool JSFunction::IsMarkedForBaseline() {
6362  return code() ==
6363         GetIsolate()->builtins()->builtin(Builtins::kCompileBaseline);
6364}
6365
6366bool JSFunction::IsMarkedForOptimization() {
6367  return code() == GetIsolate()->builtins()->builtin(
6368      Builtins::kCompileOptimized);
6369}
6370
6371
6372bool JSFunction::IsMarkedForConcurrentOptimization() {
6373  return code() == GetIsolate()->builtins()->builtin(
6374      Builtins::kCompileOptimizedConcurrent);
6375}
6376
6377
6378bool JSFunction::IsInOptimizationQueue() {
6379  return code() == GetIsolate()->builtins()->builtin(
6380      Builtins::kInOptimizationQueue);
6381}
6382
6383
6384void JSFunction::CompleteInobjectSlackTrackingIfActive() {
6385  if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
6386    initial_map()->CompleteInobjectSlackTracking();
6387  }
6388}
6389
6390
6391bool Map::IsInobjectSlackTrackingInProgress() {
6392  return construction_counter() != Map::kNoSlackTracking;
6393}
6394
6395
6396void Map::InobjectSlackTrackingStep() {
6397  if (!IsInobjectSlackTrackingInProgress()) return;
6398  int counter = construction_counter();
6399  set_construction_counter(counter - 1);
6400  if (counter == kSlackTrackingCounterEnd) {
6401    CompleteInobjectSlackTracking();
6402  }
6403}
6404
6405AbstractCode* JSFunction::abstract_code() {
6406  if (IsInterpreted()) {
6407    return AbstractCode::cast(shared()->bytecode_array());
6408  } else {
6409    return AbstractCode::cast(code());
6410  }
6411}
6412
6413Code* JSFunction::code() {
6414  return Code::cast(
6415      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6416}
6417
6418
6419void JSFunction::set_code(Code* value) {
6420  DCHECK(!GetHeap()->InNewSpace(value));
6421  Address entry = value->entry();
6422  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6423  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6424      this,
6425      HeapObject::RawField(this, kCodeEntryOffset),
6426      value);
6427}
6428
6429
6430void JSFunction::set_code_no_write_barrier(Code* value) {
6431  DCHECK(!GetHeap()->InNewSpace(value));
6432  Address entry = value->entry();
6433  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6434}
6435
6436
6437void JSFunction::ReplaceCode(Code* code) {
6438  bool was_optimized = IsOptimized();
6439  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6440
6441  if (was_optimized && is_optimized) {
6442    shared()->EvictFromOptimizedCodeMap(this->code(),
6443        "Replacing with another optimized code");
6444  }
6445
6446  set_code(code);
6447
6448  // Add/remove the function from the list of optimized functions for this
6449  // context based on the state change.
6450  if (!was_optimized && is_optimized) {
6451    context()->native_context()->AddOptimizedFunction(this);
6452  }
6453  if (was_optimized && !is_optimized) {
6454    // TODO(titzer): linear in the number of optimized functions; fix!
6455    context()->native_context()->RemoveOptimizedFunction(this);
6456  }
6457}
6458
6459bool JSFunction::has_feedback_vector() const {
6460  return !feedback_vector_cell()->value()->IsUndefined(GetIsolate());
6461}
6462
6463JSFunction::FeedbackVectorState JSFunction::GetFeedbackVectorState(
6464    Isolate* isolate) const {
6465  Cell* cell = feedback_vector_cell();
6466  if (cell == isolate->heap()->undefined_cell()) {
6467    return TOP_LEVEL_SCRIPT_NEEDS_VECTOR;
6468  } else if (cell->value() == isolate->heap()->undefined_value() ||
6469             !has_feedback_vector()) {
6470    return NEEDS_VECTOR;
6471  }
6472  return HAS_VECTOR;
6473}
6474
6475Context* JSFunction::context() {
6476  return Context::cast(READ_FIELD(this, kContextOffset));
6477}
6478
6479bool JSFunction::has_context() const {
6480  return READ_FIELD(this, kContextOffset)->IsContext();
6481}
6482
6483JSObject* JSFunction::global_proxy() {
6484  return context()->global_proxy();
6485}
6486
6487
6488Context* JSFunction::native_context() { return context()->native_context(); }
6489
6490
6491void JSFunction::set_context(Object* value) {
6492  DCHECK(value->IsUndefined(GetIsolate()) || value->IsContext());
6493  WRITE_FIELD(this, kContextOffset, value);
6494  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6495}
6496
6497ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6498          kPrototypeOrInitialMapOffset)
6499
6500
6501Map* JSFunction::initial_map() {
6502  return Map::cast(prototype_or_initial_map());
6503}
6504
6505
6506bool JSFunction::has_initial_map() {
6507  return prototype_or_initial_map()->IsMap();
6508}
6509
6510
6511bool JSFunction::has_instance_prototype() {
6512  return has_initial_map() ||
6513         !prototype_or_initial_map()->IsTheHole(GetIsolate());
6514}
6515
6516
6517bool JSFunction::has_prototype() {
6518  return map()->has_non_instance_prototype() || has_instance_prototype();
6519}
6520
6521
6522Object* JSFunction::instance_prototype() {
6523  DCHECK(has_instance_prototype());
6524  if (has_initial_map()) return initial_map()->prototype();
6525  // When there is no initial map and the prototype is a JSObject, the
6526  // initial map field is used for the prototype field.
6527  return prototype_or_initial_map();
6528}
6529
6530
6531Object* JSFunction::prototype() {
6532  DCHECK(has_prototype());
6533  // If the function's prototype property has been set to a non-JSObject
6534  // value, that value is stored in the constructor field of the map.
6535  if (map()->has_non_instance_prototype()) {
6536    Object* prototype = map()->GetConstructor();
6537    // The map must have a prototype in that field, not a back pointer.
6538    DCHECK(!prototype->IsMap());
6539    return prototype;
6540  }
6541  return instance_prototype();
6542}
6543
6544
6545bool JSFunction::is_compiled() {
6546  Builtins* builtins = GetIsolate()->builtins();
6547  return code() != builtins->builtin(Builtins::kCompileLazy) &&
6548         code() != builtins->builtin(Builtins::kCompileBaseline) &&
6549         code() != builtins->builtin(Builtins::kCompileOptimized) &&
6550         code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6551}
6552
6553ACCESSORS(JSProxy, target, JSReceiver, kTargetOffset)
6554ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6555ACCESSORS(JSProxy, hash, Object, kHashOffset)
6556
6557bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }
6558
6559ACCESSORS(JSCollection, table, Object, kTableOffset)
6560
6561
6562#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
6563  template<class Derived, class TableType>                           \
6564  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6565    return type::cast(READ_FIELD(this, offset));                     \
6566  }                                                                  \
6567  template<class Derived, class TableType>                           \
6568  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
6569      type* value, WriteBarrierMode mode) {                          \
6570    WRITE_FIELD(this, offset, value);                                \
6571    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6572  }
6573
6574ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6575ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6576ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6577
6578#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6579
6580
6581ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6582ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6583
6584
6585Address Foreign::foreign_address() {
6586  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6587}
6588
6589
6590void Foreign::set_foreign_address(Address value) {
6591  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6592}
6593
6594
6595ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6596ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6597ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6598ACCESSORS(JSGeneratorObject, input_or_debug_pos, Object, kInputOrDebugPosOffset)
6599SMI_ACCESSORS(JSGeneratorObject, resume_mode, kResumeModeOffset)
6600SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6601ACCESSORS(JSGeneratorObject, register_file, FixedArray, kRegisterFileOffset)
6602
6603bool JSGeneratorObject::is_suspended() const {
6604  DCHECK_LT(kGeneratorExecuting, 0);
6605  DCHECK_LT(kGeneratorClosed, 0);
6606  return continuation() >= 0;
6607}
6608
6609bool JSGeneratorObject::is_closed() const {
6610  return continuation() == kGeneratorClosed;
6611}
6612
6613bool JSGeneratorObject::is_executing() const {
6614  return continuation() == kGeneratorExecuting;
6615}
6616
6617ACCESSORS(JSValue, value, Object, kValueOffset)
6618
6619
6620HeapNumber* HeapNumber::cast(Object* object) {
6621  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6622  return reinterpret_cast<HeapNumber*>(object);
6623}
6624
6625
6626const HeapNumber* HeapNumber::cast(const Object* object) {
6627  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6628  return reinterpret_cast<const HeapNumber*>(object);
6629}
6630
6631
6632ACCESSORS(JSDate, value, Object, kValueOffset)
6633ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6634ACCESSORS(JSDate, year, Object, kYearOffset)
6635ACCESSORS(JSDate, month, Object, kMonthOffset)
6636ACCESSORS(JSDate, day, Object, kDayOffset)
6637ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6638ACCESSORS(JSDate, hour, Object, kHourOffset)
6639ACCESSORS(JSDate, min, Object, kMinOffset)
6640ACCESSORS(JSDate, sec, Object, kSecOffset)
6641
6642
6643SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
6644ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
6645ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6646ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6647SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6648SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6649SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
6650
6651INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6652INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6653INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
6654#define CODE_ACCESSORS(name, type, offset)           \
6655  ACCESSORS_CHECKED2(Code, name, type, offset, true, \
6656                     !GetHeap()->InNewSpace(value))
6657CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
6658CODE_ACCESSORS(handler_table, FixedArray, kHandlerTableOffset)
6659CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6660CODE_ACCESSORS(source_position_table, ByteArray, kSourcePositionTableOffset)
6661CODE_ACCESSORS(raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6662CODE_ACCESSORS(next_code_link, Object, kNextCodeLinkOffset)
6663#undef CODE_ACCESSORS
6664
6665void Code::WipeOutHeader() {
6666  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6667  WRITE_FIELD(this, kHandlerTableOffset, NULL);
6668  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6669  WRITE_FIELD(this, kSourcePositionTableOffset, NULL);
6670  // Do not wipe out major/minor keys on a code stub or IC
6671  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6672    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6673  }
6674  WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
6675  WRITE_FIELD(this, kGCMetadataOffset, NULL);
6676}
6677
6678
6679Object* Code::type_feedback_info() {
6680  DCHECK(kind() == FUNCTION);
6681  return raw_type_feedback_info();
6682}
6683
6684
6685void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6686  DCHECK(kind() == FUNCTION);
6687  set_raw_type_feedback_info(value, mode);
6688  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6689                            value, mode);
6690}
6691
6692
6693uint32_t Code::stub_key() {
6694  DCHECK(IsCodeStubOrIC());
6695  Smi* smi_key = Smi::cast(raw_type_feedback_info());
6696  return static_cast<uint32_t>(smi_key->value());
6697}
6698
6699
6700void Code::set_stub_key(uint32_t key) {
6701  DCHECK(IsCodeStubOrIC());
6702  set_raw_type_feedback_info(Smi::FromInt(key));
6703}
6704
6705
6706ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6707INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6708
6709
6710byte* Code::instruction_start()  {
6711  return FIELD_ADDR(this, kHeaderSize);
6712}
6713
6714
6715byte* Code::instruction_end()  {
6716  return instruction_start() + instruction_size();
6717}
6718
6719int Code::GetUnwindingInfoSizeOffset() const {
6720  DCHECK(has_unwinding_info());
6721  return RoundUp(kHeaderSize + instruction_size(), kInt64Size);
6722}
6723
6724int Code::unwinding_info_size() const {
6725  DCHECK(has_unwinding_info());
6726  return static_cast<int>(
6727      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
6728}
6729
6730void Code::set_unwinding_info_size(int value) {
6731  DCHECK(has_unwinding_info());
6732  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
6733}
6734
6735byte* Code::unwinding_info_start() {
6736  DCHECK(has_unwinding_info());
6737  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
6738}
6739
6740byte* Code::unwinding_info_end() {
6741  DCHECK(has_unwinding_info());
6742  return unwinding_info_start() + unwinding_info_size();
6743}
6744
6745int Code::body_size() {
6746  int unpadded_body_size =
6747      has_unwinding_info()
6748          ? static_cast<int>(unwinding_info_end() - instruction_start())
6749          : instruction_size();
6750  return RoundUp(unpadded_body_size, kObjectAlignment);
6751}
6752
6753int Code::SizeIncludingMetadata() {
6754  int size = CodeSize();
6755  size += relocation_info()->Size();
6756  size += deoptimization_data()->Size();
6757  size += handler_table()->Size();
6758  if (kind() == FUNCTION) size += source_position_table()->Size();
6759  return size;
6760}
6761
6762ByteArray* Code::unchecked_relocation_info() {
6763  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6764}
6765
6766
6767byte* Code::relocation_start() {
6768  return unchecked_relocation_info()->GetDataStartAddress();
6769}
6770
6771
6772int Code::relocation_size() {
6773  return unchecked_relocation_info()->length();
6774}
6775
6776
6777byte* Code::entry() {
6778  return instruction_start();
6779}
6780
6781
6782bool Code::contains(byte* inner_pointer) {
6783  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6784}
6785
6786
6787int Code::ExecutableSize() {
6788  // Check that the assumptions about the layout of the code object holds.
6789  DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6790            Code::kHeaderSize);
6791  return instruction_size() + Code::kHeaderSize;
6792}
6793
6794
6795int Code::CodeSize() { return SizeFor(body_size()); }
6796
6797
6798ACCESSORS(JSArray, length, Object, kLengthOffset)
6799
6800
6801void* JSArrayBuffer::backing_store() const {
6802  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6803  return reinterpret_cast<void*>(ptr);
6804}
6805
6806
6807void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6808  intptr_t ptr = reinterpret_cast<intptr_t>(value);
6809  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6810}
6811
6812
6813ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6814
6815
6816void JSArrayBuffer::set_bit_field(uint32_t bits) {
6817  if (kInt32Size != kPointerSize) {
6818#if V8_TARGET_LITTLE_ENDIAN
6819    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
6820#else
6821    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
6822#endif
6823  }
6824  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
6825}
6826
6827
6828uint32_t JSArrayBuffer::bit_field() const {
6829  return READ_UINT32_FIELD(this, kBitFieldOffset);
6830}
6831
6832
6833bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
6834
6835
6836void JSArrayBuffer::set_is_external(bool value) {
6837  DCHECK(!value || !has_guard_region());
6838  set_bit_field(IsExternal::update(bit_field(), value));
6839}
6840
6841
6842bool JSArrayBuffer::is_neuterable() {
6843  return IsNeuterable::decode(bit_field());
6844}
6845
6846
6847void JSArrayBuffer::set_is_neuterable(bool value) {
6848  set_bit_field(IsNeuterable::update(bit_field(), value));
6849}
6850
6851
6852bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
6853
6854
6855void JSArrayBuffer::set_was_neutered(bool value) {
6856  set_bit_field(WasNeutered::update(bit_field(), value));
6857}
6858
6859
6860bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
6861
6862
6863void JSArrayBuffer::set_is_shared(bool value) {
6864  set_bit_field(IsShared::update(bit_field(), value));
6865}
6866
6867bool JSArrayBuffer::has_guard_region() {
6868  return HasGuardRegion::decode(bit_field());
6869}
6870
6871void JSArrayBuffer::set_has_guard_region(bool value) {
6872  set_bit_field(HasGuardRegion::update(bit_field(), value));
6873}
6874
6875Object* JSArrayBufferView::byte_offset() const {
6876  if (WasNeutered()) return Smi::kZero;
6877  return Object::cast(READ_FIELD(this, kByteOffsetOffset));
6878}
6879
6880
6881void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
6882  WRITE_FIELD(this, kByteOffsetOffset, value);
6883  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
6884}
6885
6886
6887Object* JSArrayBufferView::byte_length() const {
6888  if (WasNeutered()) return Smi::kZero;
6889  return Object::cast(READ_FIELD(this, kByteLengthOffset));
6890}
6891
6892
6893void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
6894  WRITE_FIELD(this, kByteLengthOffset, value);
6895  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
6896}
6897
6898
6899ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6900#ifdef VERIFY_HEAP
6901ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
6902ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
6903#endif
6904
6905
6906bool JSArrayBufferView::WasNeutered() const {
6907  return JSArrayBuffer::cast(buffer())->was_neutered();
6908}
6909
6910
6911Object* JSTypedArray::length() const {
6912  if (WasNeutered()) return Smi::kZero;
6913  return Object::cast(READ_FIELD(this, kLengthOffset));
6914}
6915
6916
6917uint32_t JSTypedArray::length_value() const {
6918  if (WasNeutered()) return 0;
6919  uint32_t index = 0;
6920  CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
6921  return index;
6922}
6923
6924
6925void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
6926  WRITE_FIELD(this, kLengthOffset, value);
6927  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
6928}
6929
6930// static
6931MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
6932                                                 Handle<Object> receiver,
6933                                                 const char* method_name) {
6934  if (V8_UNLIKELY(!receiver->IsJSTypedArray())) {
6935    const MessageTemplate::Template message = MessageTemplate::kNotTypedArray;
6936    THROW_NEW_ERROR(isolate, NewTypeError(message), JSTypedArray);
6937  }
6938
6939  // TODO(caitp): throw if array.[[ViewedArrayBuffer]] is neutered (per v8:4648)
6940  return Handle<JSTypedArray>::cast(receiver);
6941}
6942
6943#ifdef VERIFY_HEAP
6944ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
6945#endif
6946
6947ACCESSORS(JSPromiseCapability, promise, Object, kPromiseOffset)
6948ACCESSORS(JSPromiseCapability, resolve, Object, kResolveOffset)
6949ACCESSORS(JSPromiseCapability, reject, Object, kRejectOffset)
6950
6951SMI_ACCESSORS(JSPromise, status, kStatusOffset)
6952ACCESSORS(JSPromise, result, Object, kResultOffset)
6953ACCESSORS(JSPromise, deferred_promise, Object, kDeferredPromiseOffset)
6954ACCESSORS(JSPromise, deferred_on_resolve, Object, kDeferredOnResolveOffset)
6955ACCESSORS(JSPromise, deferred_on_reject, Object, kDeferredOnRejectOffset)
6956ACCESSORS(JSPromise, fulfill_reactions, Object, kFulfillReactionsOffset)
6957ACCESSORS(JSPromise, reject_reactions, Object, kRejectReactionsOffset)
6958SMI_ACCESSORS(JSPromise, flags, kFlagsOffset)
6959BOOL_ACCESSORS(JSPromise, flags, has_handler, kHasHandlerBit)
6960BOOL_ACCESSORS(JSPromise, flags, handled_hint, kHandledHintBit)
6961
6962ACCESSORS(JSRegExp, data, Object, kDataOffset)
6963ACCESSORS(JSRegExp, flags, Object, kFlagsOffset)
6964ACCESSORS(JSRegExp, source, Object, kSourceOffset)
6965
6966
6967JSRegExp::Type JSRegExp::TypeTag() {
6968  Object* data = this->data();
6969  if (data->IsUndefined(GetIsolate())) return JSRegExp::NOT_COMPILED;
6970  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6971  return static_cast<JSRegExp::Type>(smi->value());
6972}
6973
6974
6975int JSRegExp::CaptureCount() {
6976  switch (TypeTag()) {
6977    case ATOM:
6978      return 0;
6979    case IRREGEXP:
6980      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6981    default:
6982      UNREACHABLE();
6983      return -1;
6984  }
6985}
6986
6987
6988JSRegExp::Flags JSRegExp::GetFlags() {
6989  DCHECK(this->data()->IsFixedArray());
6990  Object* data = this->data();
6991  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6992  return Flags(smi->value());
6993}
6994
6995
6996String* JSRegExp::Pattern() {
6997  DCHECK(this->data()->IsFixedArray());
6998  Object* data = this->data();
6999  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
7000  return pattern;
7001}
7002
7003
7004Object* JSRegExp::DataAt(int index) {
7005  DCHECK(TypeTag() != NOT_COMPILED);
7006  return FixedArray::cast(data())->get(index);
7007}
7008
7009
7010void JSRegExp::SetDataAt(int index, Object* value) {
7011  DCHECK(TypeTag() != NOT_COMPILED);
7012  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
7013  FixedArray::cast(data())->set(index, value);
7014}
7015
7016void JSRegExp::SetLastIndex(int index) {
7017  static const int offset =
7018      kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
7019  Smi* value = Smi::FromInt(index);
7020  WRITE_FIELD(this, offset, value);
7021}
7022
7023Object* JSRegExp::LastIndex() {
7024  static const int offset =
7025      kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
7026  return READ_FIELD(this, offset);
7027}
7028
7029ElementsKind JSObject::GetElementsKind() {
7030  ElementsKind kind = map()->elements_kind();
7031#if VERIFY_HEAP && DEBUG
7032  FixedArrayBase* fixed_array =
7033      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
7034
7035  // If a GC was caused while constructing this object, the elements
7036  // pointer may point to a one pointer filler map.
7037  if (ElementsAreSafeToExamine()) {
7038    Map* map = fixed_array->map();
7039    if (IsFastSmiOrObjectElementsKind(kind)) {
7040      DCHECK(map == GetHeap()->fixed_array_map() ||
7041             map == GetHeap()->fixed_cow_array_map());
7042    } else if (IsFastDoubleElementsKind(kind)) {
7043      DCHECK(fixed_array->IsFixedDoubleArray() ||
7044             fixed_array == GetHeap()->empty_fixed_array());
7045    } else if (kind == DICTIONARY_ELEMENTS) {
7046      DCHECK(fixed_array->IsFixedArray());
7047      DCHECK(fixed_array->IsDictionary());
7048    } else {
7049      DCHECK(kind > DICTIONARY_ELEMENTS);
7050    }
7051    DCHECK(!IsSloppyArgumentsElements(kind) ||
7052           (elements()->IsFixedArray() && elements()->length() >= 2));
7053  }
7054#endif
7055  return kind;
7056}
7057
7058
7059bool JSObject::HasFastObjectElements() {
7060  return IsFastObjectElementsKind(GetElementsKind());
7061}
7062
7063
7064bool JSObject::HasFastSmiElements() {
7065  return IsFastSmiElementsKind(GetElementsKind());
7066}
7067
7068
7069bool JSObject::HasFastSmiOrObjectElements() {
7070  return IsFastSmiOrObjectElementsKind(GetElementsKind());
7071}
7072
7073
7074bool JSObject::HasFastDoubleElements() {
7075  return IsFastDoubleElementsKind(GetElementsKind());
7076}
7077
7078
7079bool JSObject::HasFastHoleyElements() {
7080  return IsFastHoleyElementsKind(GetElementsKind());
7081}
7082
7083
7084bool JSObject::HasFastElements() {
7085  return IsFastElementsKind(GetElementsKind());
7086}
7087
7088
7089bool JSObject::HasDictionaryElements() {
7090  return GetElementsKind() == DICTIONARY_ELEMENTS;
7091}
7092
7093
7094bool JSObject::HasFastArgumentsElements() {
7095  return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
7096}
7097
7098
7099bool JSObject::HasSlowArgumentsElements() {
7100  return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
7101}
7102
7103
7104bool JSObject::HasSloppyArgumentsElements() {
7105  return IsSloppyArgumentsElements(GetElementsKind());
7106}
7107
7108bool JSObject::HasStringWrapperElements() {
7109  return IsStringWrapperElementsKind(GetElementsKind());
7110}
7111
7112bool JSObject::HasFastStringWrapperElements() {
7113  return GetElementsKind() == FAST_STRING_WRAPPER_ELEMENTS;
7114}
7115
7116bool JSObject::HasSlowStringWrapperElements() {
7117  return GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS;
7118}
7119
7120bool JSObject::HasFixedTypedArrayElements() {
7121  DCHECK_NOT_NULL(elements());
7122  return map()->has_fixed_typed_array_elements();
7123}
7124
7125#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
7126  bool JSObject::HasFixed##Type##Elements() {                          \
7127    HeapObject* array = elements();                                    \
7128    DCHECK(array != NULL);                                             \
7129    if (!array->IsHeapObject()) return false;                          \
7130    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
7131  }
7132
7133TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
7134
7135#undef FIXED_TYPED_ELEMENTS_CHECK
7136
7137
7138bool JSObject::HasNamedInterceptor() {
7139  return map()->has_named_interceptor();
7140}
7141
7142
7143bool JSObject::HasIndexedInterceptor() {
7144  return map()->has_indexed_interceptor();
7145}
7146
7147
7148GlobalDictionary* JSObject::global_dictionary() {
7149  DCHECK(!HasFastProperties());
7150  DCHECK(IsJSGlobalObject());
7151  return GlobalDictionary::cast(properties());
7152}
7153
7154
7155SeededNumberDictionary* JSObject::element_dictionary() {
7156  DCHECK(HasDictionaryElements() || HasSlowStringWrapperElements());
7157  return SeededNumberDictionary::cast(elements());
7158}
7159
7160
7161bool Name::IsHashFieldComputed(uint32_t field) {
7162  return (field & kHashNotComputedMask) == 0;
7163}
7164
7165
7166bool Name::HasHashCode() {
7167  return IsHashFieldComputed(hash_field());
7168}
7169
7170
7171uint32_t Name::Hash() {
7172  // Fast case: has hash code already been computed?
7173  uint32_t field = hash_field();
7174  if (IsHashFieldComputed(field)) return field >> kHashShift;
7175  // Slow case: compute hash code and set it. Has to be a string.
7176  return String::cast(this)->ComputeAndSetHash();
7177}
7178
7179
7180bool Name::IsPrivate() {
7181  return this->IsSymbol() && Symbol::cast(this)->is_private();
7182}
7183
7184
7185StringHasher::StringHasher(int length, uint32_t seed)
7186  : length_(length),
7187    raw_running_hash_(seed),
7188    array_index_(0),
7189    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
7190    is_first_char_(true) {
7191  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
7192}
7193
7194
7195bool StringHasher::has_trivial_hash() {
7196  return length_ > String::kMaxHashCalcLength;
7197}
7198
7199
7200uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
7201  running_hash += c;
7202  running_hash += (running_hash << 10);
7203  running_hash ^= (running_hash >> 6);
7204  return running_hash;
7205}
7206
7207
7208uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
7209  running_hash += (running_hash << 3);
7210  running_hash ^= (running_hash >> 11);
7211  running_hash += (running_hash << 15);
7212  if ((running_hash & String::kHashBitMask) == 0) {
7213    return kZeroHash;
7214  }
7215  return running_hash;
7216}
7217
7218
7219uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
7220                                          const uc16* chars, int length) {
7221  DCHECK_NOT_NULL(chars);
7222  DCHECK(length >= 0);
7223  for (int i = 0; i < length; ++i) {
7224    running_hash = AddCharacterCore(running_hash, *chars++);
7225  }
7226  return running_hash;
7227}
7228
7229
7230uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
7231                                                 const char* chars,
7232                                                 int length) {
7233  DCHECK_NOT_NULL(chars);
7234  DCHECK(length >= 0);
7235  for (int i = 0; i < length; ++i) {
7236    uint16_t c = static_cast<uint16_t>(*chars++);
7237    running_hash = AddCharacterCore(running_hash, c);
7238  }
7239  return running_hash;
7240}
7241
7242
7243void StringHasher::AddCharacter(uint16_t c) {
7244  // Use the Jenkins one-at-a-time hash function to update the hash
7245  // for the given character.
7246  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
7247}
7248
7249
7250bool StringHasher::UpdateIndex(uint16_t c) {
7251  DCHECK(is_array_index_);
7252  if (c < '0' || c > '9') {
7253    is_array_index_ = false;
7254    return false;
7255  }
7256  int d = c - '0';
7257  if (is_first_char_) {
7258    is_first_char_ = false;
7259    if (c == '0' && length_ > 1) {
7260      is_array_index_ = false;
7261      return false;
7262    }
7263  }
7264  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
7265    is_array_index_ = false;
7266    return false;
7267  }
7268  array_index_ = array_index_ * 10 + d;
7269  return true;
7270}
7271
7272
7273template<typename Char>
7274inline void StringHasher::AddCharacters(const Char* chars, int length) {
7275  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
7276  int i = 0;
7277  if (is_array_index_) {
7278    for (; i < length; i++) {
7279      AddCharacter(chars[i]);
7280      if (!UpdateIndex(chars[i])) {
7281        i++;
7282        break;
7283      }
7284    }
7285  }
7286  for (; i < length; i++) {
7287    DCHECK(!is_array_index_);
7288    AddCharacter(chars[i]);
7289  }
7290}
7291
7292
7293template <typename schar>
7294uint32_t StringHasher::HashSequentialString(const schar* chars,
7295                                            int length,
7296                                            uint32_t seed) {
7297  StringHasher hasher(length, seed);
7298  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
7299  return hasher.GetHashField();
7300}
7301
7302
7303IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
7304    : StringHasher(len, seed) {}
7305
7306
7307uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
7308  IteratingStringHasher hasher(string->length(), seed);
7309  // Nothing to do.
7310  if (hasher.has_trivial_hash()) return hasher.GetHashField();
7311  ConsString* cons_string = String::VisitFlat(&hasher, string);
7312  if (cons_string == nullptr) return hasher.GetHashField();
7313  hasher.VisitConsString(cons_string);
7314  return hasher.GetHashField();
7315}
7316
7317
7318void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
7319                                               int length) {
7320  AddCharacters(chars, length);
7321}
7322
7323
7324void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
7325                                               int length) {
7326  AddCharacters(chars, length);
7327}
7328
7329
7330bool Name::AsArrayIndex(uint32_t* index) {
7331  return IsString() && String::cast(this)->AsArrayIndex(index);
7332}
7333
7334
7335bool String::AsArrayIndex(uint32_t* index) {
7336  uint32_t field = hash_field();
7337  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
7338    return false;
7339  }
7340  return SlowAsArrayIndex(index);
7341}
7342
7343
7344void String::SetForwardedInternalizedString(String* canonical) {
7345  DCHECK(IsInternalizedString());
7346  DCHECK(HasHashCode());
7347  if (canonical == this) return;  // No need to forward.
7348  DCHECK(SlowEquals(canonical));
7349  DCHECK(canonical->IsInternalizedString());
7350  DCHECK(canonical->HasHashCode());
7351  WRITE_FIELD(this, kHashFieldSlot, canonical);
7352  // Setting the hash field to a tagged value sets the LSB, causing the hash
7353  // code to be interpreted as uninitialized.  We use this fact to recognize
7354  // that we have a forwarded string.
7355  DCHECK(!HasHashCode());
7356}
7357
7358
7359String* String::GetForwardedInternalizedString() {
7360  DCHECK(IsInternalizedString());
7361  if (HasHashCode()) return this;
7362  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
7363  DCHECK(canonical->IsInternalizedString());
7364  DCHECK(SlowEquals(canonical));
7365  DCHECK(canonical->HasHashCode());
7366  return canonical;
7367}
7368
7369
7370// static
7371Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y) {
7372  Maybe<ComparisonResult> result = Compare(x, y);
7373  if (result.IsJust()) {
7374    switch (result.FromJust()) {
7375      case ComparisonResult::kGreaterThan:
7376        return Just(true);
7377      case ComparisonResult::kLessThan:
7378      case ComparisonResult::kEqual:
7379      case ComparisonResult::kUndefined:
7380        return Just(false);
7381    }
7382  }
7383  return Nothing<bool>();
7384}
7385
7386
7387// static
7388Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y) {
7389  Maybe<ComparisonResult> result = Compare(x, y);
7390  if (result.IsJust()) {
7391    switch (result.FromJust()) {
7392      case ComparisonResult::kEqual:
7393      case ComparisonResult::kGreaterThan:
7394        return Just(true);
7395      case ComparisonResult::kLessThan:
7396      case ComparisonResult::kUndefined:
7397        return Just(false);
7398    }
7399  }
7400  return Nothing<bool>();
7401}
7402
7403
7404// static
7405Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y) {
7406  Maybe<ComparisonResult> result = Compare(x, y);
7407  if (result.IsJust()) {
7408    switch (result.FromJust()) {
7409      case ComparisonResult::kLessThan:
7410        return Just(true);
7411      case ComparisonResult::kEqual:
7412      case ComparisonResult::kGreaterThan:
7413      case ComparisonResult::kUndefined:
7414        return Just(false);
7415    }
7416  }
7417  return Nothing<bool>();
7418}
7419
7420
7421// static
7422Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y) {
7423  Maybe<ComparisonResult> result = Compare(x, y);
7424  if (result.IsJust()) {
7425    switch (result.FromJust()) {
7426      case ComparisonResult::kEqual:
7427      case ComparisonResult::kLessThan:
7428        return Just(true);
7429      case ComparisonResult::kGreaterThan:
7430      case ComparisonResult::kUndefined:
7431        return Just(false);
7432    }
7433  }
7434  return Nothing<bool>();
7435}
7436
7437MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
7438                                                 Handle<Name> name) {
7439  LookupIterator it =
7440      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7441  return GetProperty(&it);
7442}
7443
7444MaybeHandle<Object> Object::SetPropertyOrElement(Handle<Object> object,
7445                                                 Handle<Name> name,
7446                                                 Handle<Object> value,
7447                                                 LanguageMode language_mode,
7448                                                 StoreFromKeyed store_mode) {
7449  LookupIterator it =
7450      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7451  MAYBE_RETURN_NULL(SetProperty(&it, value, language_mode, store_mode));
7452  return value;
7453}
7454
7455MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
7456                                                 Handle<Name> name,
7457                                                 Handle<JSReceiver> holder) {
7458  LookupIterator it = LookupIterator::PropertyOrElement(
7459      name->GetIsolate(), receiver, name, holder);
7460  return GetProperty(&it);
7461}
7462
7463
7464void JSReceiver::initialize_properties() {
7465  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
7466  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_properties_dictionary()));
7467  if (map()->is_dictionary_map()) {
7468    WRITE_FIELD(this, kPropertiesOffset,
7469                GetHeap()->empty_properties_dictionary());
7470  } else {
7471    WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
7472  }
7473}
7474
7475
7476bool JSReceiver::HasFastProperties() {
7477  DCHECK_EQ(properties()->IsDictionary(), map()->is_dictionary_map());
7478  return !properties()->IsDictionary();
7479}
7480
7481
7482NameDictionary* JSReceiver::property_dictionary() {
7483  DCHECK(!HasFastProperties());
7484  DCHECK(!IsJSGlobalObject());
7485  return NameDictionary::cast(properties());
7486}
7487
7488Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
7489                                    Handle<Name> name) {
7490  LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(),
7491                                                        object, name, object);
7492  return HasProperty(&it);
7493}
7494
7495
7496Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7497                                       Handle<Name> name) {
7498  if (object->IsJSObject()) {  // Shortcut
7499    LookupIterator it = LookupIterator::PropertyOrElement(
7500        object->GetIsolate(), object, name, object, LookupIterator::OWN);
7501    return HasProperty(&it);
7502  }
7503
7504  Maybe<PropertyAttributes> attributes =
7505      JSReceiver::GetOwnPropertyAttributes(object, name);
7506  MAYBE_RETURN(attributes, Nothing<bool>());
7507  return Just(attributes.FromJust() != ABSENT);
7508}
7509
7510Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7511                                       uint32_t index) {
7512  if (object->IsJSObject()) {  // Shortcut
7513    LookupIterator it(object->GetIsolate(), object, index, object,
7514                      LookupIterator::OWN);
7515    return HasProperty(&it);
7516  }
7517
7518  Maybe<PropertyAttributes> attributes =
7519      JSReceiver::GetOwnPropertyAttributes(object, index);
7520  MAYBE_RETURN(attributes, Nothing<bool>());
7521  return Just(attributes.FromJust() != ABSENT);
7522}
7523
7524Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
7525    Handle<JSReceiver> object, Handle<Name> name) {
7526  LookupIterator it = LookupIterator::PropertyOrElement(name->GetIsolate(),
7527                                                        object, name, object);
7528  return GetPropertyAttributes(&it);
7529}
7530
7531
7532Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7533    Handle<JSReceiver> object, Handle<Name> name) {
7534  LookupIterator it = LookupIterator::PropertyOrElement(
7535      name->GetIsolate(), object, name, object, LookupIterator::OWN);
7536  return GetPropertyAttributes(&it);
7537}
7538
7539Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7540    Handle<JSReceiver> object, uint32_t index) {
7541  LookupIterator it(object->GetIsolate(), object, index, object,
7542                    LookupIterator::OWN);
7543  return GetPropertyAttributes(&it);
7544}
7545
7546Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7547  LookupIterator it(object->GetIsolate(), object, index, object);
7548  return HasProperty(&it);
7549}
7550
7551
7552Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
7553    Handle<JSReceiver> object, uint32_t index) {
7554  Isolate* isolate = object->GetIsolate();
7555  LookupIterator it(isolate, object, index, object);
7556  return GetPropertyAttributes(&it);
7557}
7558
7559
7560Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
7561    Handle<JSReceiver> object, uint32_t index) {
7562  Isolate* isolate = object->GetIsolate();
7563  LookupIterator it(isolate, object, index, object, LookupIterator::OWN);
7564  return GetPropertyAttributes(&it);
7565}
7566
7567
7568bool JSGlobalObject::IsDetached() {
7569  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7570}
7571
7572
7573bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
7574  const PrototypeIterator iter(this->GetIsolate(),
7575                               const_cast<JSGlobalProxy*>(this));
7576  return iter.GetCurrent() != global;
7577}
7578
7579inline int JSGlobalProxy::SizeWithInternalFields(int internal_field_count) {
7580  DCHECK_GE(internal_field_count, 0);
7581  return kSize + internal_field_count * kPointerSize;
7582}
7583
7584Smi* JSReceiver::GetOrCreateIdentityHash(Isolate* isolate,
7585                                         Handle<JSReceiver> object) {
7586  return object->IsJSProxy() ? JSProxy::GetOrCreateIdentityHash(
7587                                   isolate, Handle<JSProxy>::cast(object))
7588                             : JSObject::GetOrCreateIdentityHash(
7589                                   isolate, Handle<JSObject>::cast(object));
7590}
7591
7592Object* JSReceiver::GetIdentityHash(Isolate* isolate,
7593                                    Handle<JSReceiver> receiver) {
7594  return receiver->IsJSProxy()
7595             ? JSProxy::GetIdentityHash(Handle<JSProxy>::cast(receiver))
7596             : JSObject::GetIdentityHash(isolate,
7597                                         Handle<JSObject>::cast(receiver));
7598}
7599
7600
7601bool AccessorInfo::all_can_read() {
7602  return BooleanBit::get(flag(), kAllCanReadBit);
7603}
7604
7605
7606void AccessorInfo::set_all_can_read(bool value) {
7607  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7608}
7609
7610
7611bool AccessorInfo::all_can_write() {
7612  return BooleanBit::get(flag(), kAllCanWriteBit);
7613}
7614
7615
7616void AccessorInfo::set_all_can_write(bool value) {
7617  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7618}
7619
7620
7621bool AccessorInfo::is_special_data_property() {
7622  return BooleanBit::get(flag(), kSpecialDataProperty);
7623}
7624
7625
7626void AccessorInfo::set_is_special_data_property(bool value) {
7627  set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
7628}
7629
7630bool AccessorInfo::replace_on_access() {
7631  return BooleanBit::get(flag(), kReplaceOnAccess);
7632}
7633
7634void AccessorInfo::set_replace_on_access(bool value) {
7635  set_flag(BooleanBit::set(flag(), kReplaceOnAccess, value));
7636}
7637
7638bool AccessorInfo::is_sloppy() { return BooleanBit::get(flag(), kIsSloppy); }
7639
7640void AccessorInfo::set_is_sloppy(bool value) {
7641  set_flag(BooleanBit::set(flag(), kIsSloppy, value));
7642}
7643
7644PropertyAttributes AccessorInfo::property_attributes() {
7645  return AttributesField::decode(static_cast<uint32_t>(flag()));
7646}
7647
7648
7649void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7650  set_flag(AttributesField::update(flag(), attributes));
7651}
7652
7653bool FunctionTemplateInfo::IsTemplateFor(JSObject* object) {
7654  return IsTemplateFor(object->map());
7655}
7656
7657bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7658  if (!HasExpectedReceiverType()) return true;
7659  if (!receiver->IsJSObject()) return false;
7660  return FunctionTemplateInfo::cast(expected_receiver_type())
7661      ->IsTemplateFor(JSObject::cast(receiver)->map());
7662}
7663
7664
7665bool AccessorInfo::HasExpectedReceiverType() {
7666  return expected_receiver_type()->IsFunctionTemplateInfo();
7667}
7668
7669
7670Object* AccessorPair::get(AccessorComponent component) {
7671  return component == ACCESSOR_GETTER ? getter() : setter();
7672}
7673
7674
7675void AccessorPair::set(AccessorComponent component, Object* value) {
7676  if (component == ACCESSOR_GETTER) {
7677    set_getter(value);
7678  } else {
7679    set_setter(value);
7680  }
7681}
7682
7683
7684void AccessorPair::SetComponents(Object* getter, Object* setter) {
7685  Isolate* isolate = GetIsolate();
7686  if (!getter->IsNull(isolate)) set_getter(getter);
7687  if (!setter->IsNull(isolate)) set_setter(setter);
7688}
7689
7690
7691bool AccessorPair::Equals(AccessorPair* pair) {
7692  return (this == pair) || pair->Equals(getter(), setter());
7693}
7694
7695
7696bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7697  return (getter() == getter_value) && (setter() == setter_value);
7698}
7699
7700
7701bool AccessorPair::ContainsAccessor() {
7702  return IsJSAccessor(getter()) || IsJSAccessor(setter());
7703}
7704
7705
7706bool AccessorPair::IsJSAccessor(Object* obj) {
7707  return obj->IsCallable() || obj->IsUndefined(GetIsolate());
7708}
7709
7710
7711template<typename Derived, typename Shape, typename Key>
7712void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7713                                               Handle<Object> key,
7714                                               Handle<Object> value) {
7715  this->SetEntry(entry, key, value, PropertyDetails(Smi::kZero));
7716}
7717
7718
7719template<typename Derived, typename Shape, typename Key>
7720void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7721                                               Handle<Object> key,
7722                                               Handle<Object> value,
7723                                               PropertyDetails details) {
7724  Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
7725}
7726
7727
7728template <typename Key>
7729template <typename Dictionary>
7730void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
7731                                        Handle<Object> key,
7732                                        Handle<Object> value,
7733                                        PropertyDetails details) {
7734  STATIC_ASSERT(Dictionary::kEntrySize == 2 || Dictionary::kEntrySize == 3);
7735  DCHECK(!key->IsName() || details.dictionary_index() > 0);
7736  int index = dict->EntryToIndex(entry);
7737  DisallowHeapAllocation no_gc;
7738  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7739  dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
7740  dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
7741  if (Dictionary::kEntrySize == 3) {
7742    dict->set(index + Dictionary::kEntryDetailsIndex, details.AsSmi());
7743  }
7744}
7745
7746
7747template <typename Dictionary>
7748void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
7749                                     Handle<Object> key, Handle<Object> value,
7750                                     PropertyDetails details) {
7751  STATIC_ASSERT(Dictionary::kEntrySize == 2);
7752  DCHECK(!key->IsName() || details.dictionary_index() > 0);
7753  DCHECK(value->IsPropertyCell());
7754  int index = dict->EntryToIndex(entry);
7755  DisallowHeapAllocation no_gc;
7756  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7757  dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
7758  dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
7759  PropertyCell::cast(*value)->set_property_details(details);
7760}
7761
7762
7763bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7764  DCHECK(other->IsNumber());
7765  return key == static_cast<uint32_t>(other->Number());
7766}
7767
7768
7769uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7770  return ComputeIntegerHash(key, 0);
7771}
7772
7773
7774uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7775                                                      Object* other) {
7776  DCHECK(other->IsNumber());
7777  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7778}
7779
7780Map* UnseededNumberDictionaryShape::GetMap(Isolate* isolate) {
7781  return isolate->heap()->unseeded_number_dictionary_map();
7782}
7783
7784uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7785  return ComputeIntegerHash(key, seed);
7786}
7787
7788
7789uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7790                                                          uint32_t seed,
7791                                                          Object* other) {
7792  DCHECK(other->IsNumber());
7793  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7794}
7795
7796
7797Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7798  return isolate->factory()->NewNumberFromUint(key);
7799}
7800
7801
7802bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7803  // We know that all entries in a hash table had their hash keys created.
7804  // Use that knowledge to have fast failure.
7805  if (key->Hash() != Name::cast(other)->Hash()) return false;
7806  return key->Equals(Name::cast(other));
7807}
7808
7809
7810uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7811  return key->Hash();
7812}
7813
7814
7815uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7816  return Name::cast(other)->Hash();
7817}
7818
7819
7820Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7821                                             Handle<Name> key) {
7822  DCHECK(key->IsUniqueName());
7823  return key;
7824}
7825
7826
7827Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7828    Handle<NameDictionary> dictionary) {
7829  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7830}
7831
7832
7833template <typename Dictionary>
7834PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7835  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
7836  Object* raw_value = dict->ValueAt(entry);
7837  DCHECK(raw_value->IsPropertyCell());
7838  PropertyCell* cell = PropertyCell::cast(raw_value);
7839  return cell->property_details();
7840}
7841
7842
7843template <typename Dictionary>
7844void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7845                                         PropertyDetails value) {
7846  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
7847  Object* raw_value = dict->ValueAt(entry);
7848  DCHECK(raw_value->IsPropertyCell());
7849  PropertyCell* cell = PropertyCell::cast(raw_value);
7850  cell->set_property_details(value);
7851}
7852
7853
7854template <typename Dictionary>
7855bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7856  DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7857  Isolate* isolate = dict->GetIsolate();
7858  return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole(isolate);
7859}
7860
7861
7862bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7863  return key->SameValue(other);
7864}
7865
7866
7867uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7868  return Smi::cast(key->GetHash())->value();
7869}
7870
7871
7872uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7873                                             Object* other) {
7874  return Smi::cast(other->GetHash())->value();
7875}
7876
7877
7878Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7879                                              Handle<Object> key) {
7880  return key;
7881}
7882
7883
7884Handle<ObjectHashTable> ObjectHashTable::Shrink(
7885    Handle<ObjectHashTable> table, Handle<Object> key) {
7886  return DerivedHashTable::Shrink(table, key);
7887}
7888
7889
7890Object* OrderedHashMap::ValueAt(int entry) {
7891  return get(EntryToIndex(entry) + kValueOffset);
7892}
7893
7894
7895template <int entrysize>
7896bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7897  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7898  return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7899                           : *key == other;
7900}
7901
7902
7903template <int entrysize>
7904uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7905  intptr_t hash =
7906      key->IsWeakCell()
7907          ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7908          : reinterpret_cast<intptr_t>(*key);
7909  return (uint32_t)(hash & 0xFFFFFFFF);
7910}
7911
7912
7913template <int entrysize>
7914uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7915                                                      Object* other) {
7916  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7917  intptr_t hash = reinterpret_cast<intptr_t>(other);
7918  return (uint32_t)(hash & 0xFFFFFFFF);
7919}
7920
7921
7922template <int entrysize>
7923Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7924                                                       Handle<Object> key) {
7925  return key;
7926}
7927
7928
7929ACCESSORS(ModuleInfoEntry, export_name, Object, kExportNameOffset)
7930ACCESSORS(ModuleInfoEntry, local_name, Object, kLocalNameOffset)
7931ACCESSORS(ModuleInfoEntry, import_name, Object, kImportNameOffset)
7932SMI_ACCESSORS(ModuleInfoEntry, module_request, kModuleRequestOffset)
7933SMI_ACCESSORS(ModuleInfoEntry, cell_index, kCellIndexOffset)
7934SMI_ACCESSORS(ModuleInfoEntry, beg_pos, kBegPosOffset)
7935SMI_ACCESSORS(ModuleInfoEntry, end_pos, kEndPosOffset)
7936
7937void Map::ClearCodeCache(Heap* heap) {
7938  // No write barrier is needed since empty_fixed_array is not in new space.
7939  // Please note this function is used during marking:
7940  //  - MarkCompactCollector::MarkUnmarkedObject
7941  //  - IncrementalMarking::Step
7942  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7943}
7944
7945
7946int Map::SlackForArraySize(int old_size, int size_limit) {
7947  const int max_slack = size_limit - old_size;
7948  CHECK_LE(0, max_slack);
7949  if (old_size < 4) {
7950    DCHECK_LE(1, max_slack);
7951    return 1;
7952  }
7953  return Min(max_slack, old_size / 4);
7954}
7955
7956
7957void JSArray::set_length(Smi* length) {
7958  // Don't need a write barrier for a Smi.
7959  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7960}
7961
7962
7963bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
7964  // This constant is somewhat arbitrary. Any large enough value would work.
7965  const uint32_t kMaxFastArrayLength = 32 * 1024 * 1024;
7966  // If the new array won't fit in a some non-trivial fraction of the max old
7967  // space size, then force it to go dictionary mode.
7968  uint32_t heap_based_upper_bound =
7969      static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
7970  return new_length >= Min(kMaxFastArrayLength, heap_based_upper_bound);
7971}
7972
7973
7974bool JSArray::AllowsSetLength() {
7975  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7976  DCHECK(result == !HasFixedTypedArrayElements());
7977  return result;
7978}
7979
7980
7981void JSArray::SetContent(Handle<JSArray> array,
7982                         Handle<FixedArrayBase> storage) {
7983  EnsureCanContainElements(array, storage, storage->length(),
7984                           ALLOW_COPIED_DOUBLE_ELEMENTS);
7985
7986  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7987          IsFastDoubleElementsKind(array->GetElementsKind())) ||
7988         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7989          (IsFastObjectElementsKind(array->GetElementsKind()) ||
7990           (IsFastSmiElementsKind(array->GetElementsKind()) &&
7991            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7992  array->set_elements(*storage);
7993  array->set_length(Smi::FromInt(storage->length()));
7994}
7995
7996
7997bool JSArray::HasArrayPrototype(Isolate* isolate) {
7998  return map()->prototype() == *isolate->initial_array_prototype();
7999}
8000
8001
8002int TypeFeedbackInfo::ic_total_count() {
8003  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8004  return ICTotalCountField::decode(current);
8005}
8006
8007
8008void TypeFeedbackInfo::set_ic_total_count(int count) {
8009  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8010  value = ICTotalCountField::update(value,
8011                                    ICTotalCountField::decode(count));
8012  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
8013}
8014
8015
8016int TypeFeedbackInfo::ic_with_type_info_count() {
8017  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8018  return ICsWithTypeInfoCountField::decode(current);
8019}
8020
8021
8022void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
8023  if (delta == 0) return;
8024  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8025  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
8026  // We can get negative count here when the type-feedback info is
8027  // shared between two code objects. The can only happen when
8028  // the debugger made a shallow copy of code object (see Heap::CopyCode).
8029  // Since we do not optimize when the debugger is active, we can skip
8030  // this counter update.
8031  if (new_count >= 0) {
8032    new_count &= ICsWithTypeInfoCountField::kMask;
8033    value = ICsWithTypeInfoCountField::update(value, new_count);
8034    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
8035  }
8036}
8037
8038
8039int TypeFeedbackInfo::ic_generic_count() {
8040  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
8041}
8042
8043
8044void TypeFeedbackInfo::change_ic_generic_count(int delta) {
8045  if (delta == 0) return;
8046  int new_count = ic_generic_count() + delta;
8047  if (new_count >= 0) {
8048    new_count &= ~Smi::kMinValue;
8049    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
8050  }
8051}
8052
8053
8054void TypeFeedbackInfo::initialize_storage() {
8055  WRITE_FIELD(this, kStorage1Offset, Smi::kZero);
8056  WRITE_FIELD(this, kStorage2Offset, Smi::kZero);
8057  WRITE_FIELD(this, kStorage3Offset, Smi::kZero);
8058}
8059
8060
8061void TypeFeedbackInfo::change_own_type_change_checksum() {
8062  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8063  int checksum = OwnTypeChangeChecksum::decode(value);
8064  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
8065  value = OwnTypeChangeChecksum::update(value, checksum);
8066  // Ensure packed bit field is in Smi range.
8067  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
8068  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
8069  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
8070}
8071
8072
8073void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
8074  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8075  int mask = (1 << kTypeChangeChecksumBits) - 1;
8076  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
8077  // Ensure packed bit field is in Smi range.
8078  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
8079  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
8080  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
8081}
8082
8083
8084int TypeFeedbackInfo::own_type_change_checksum() {
8085  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8086  return OwnTypeChangeChecksum::decode(value);
8087}
8088
8089
8090bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
8091  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8092  int mask = (1 << kTypeChangeChecksumBits) - 1;
8093  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
8094}
8095
8096
8097SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
8098
8099
8100Relocatable::Relocatable(Isolate* isolate) {
8101  isolate_ = isolate;
8102  prev_ = isolate->relocatable_top();
8103  isolate->set_relocatable_top(this);
8104}
8105
8106
8107Relocatable::~Relocatable() {
8108  DCHECK_EQ(isolate_->relocatable_top(), this);
8109  isolate_->set_relocatable_top(prev_);
8110}
8111
8112
8113template<class Derived, class TableType>
8114Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
8115  TableType* table(TableType::cast(this->table()));
8116  int index = Smi::cast(this->index())->value();
8117  Object* key = table->KeyAt(index);
8118  DCHECK(!key->IsTheHole(table->GetIsolate()));
8119  return key;
8120}
8121
8122
8123void JSSetIterator::PopulateValueArray(FixedArray* array) {
8124  array->set(0, CurrentKey());
8125}
8126
8127
8128void JSMapIterator::PopulateValueArray(FixedArray* array) {
8129  array->set(0, CurrentKey());
8130  array->set(1, CurrentValue());
8131}
8132
8133
8134Object* JSMapIterator::CurrentValue() {
8135  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
8136  int index = Smi::cast(this->index())->value();
8137  Object* value = table->ValueAt(index);
8138  DCHECK(!value->IsTheHole(table->GetIsolate()));
8139  return value;
8140}
8141
8142
8143String::SubStringRange::SubStringRange(String* string, int first, int length)
8144    : string_(string),
8145      first_(first),
8146      length_(length == -1 ? string->length() : length) {}
8147
8148
8149class String::SubStringRange::iterator final {
8150 public:
8151  typedef std::forward_iterator_tag iterator_category;
8152  typedef int difference_type;
8153  typedef uc16 value_type;
8154  typedef uc16* pointer;
8155  typedef uc16& reference;
8156
8157  iterator(const iterator& other)
8158      : content_(other.content_), offset_(other.offset_) {}
8159
8160  uc16 operator*() { return content_.Get(offset_); }
8161  bool operator==(const iterator& other) const {
8162    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
8163  }
8164  bool operator!=(const iterator& other) const {
8165    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
8166  }
8167  iterator& operator++() {
8168    ++offset_;
8169    return *this;
8170  }
8171  iterator operator++(int);
8172
8173 private:
8174  friend class String;
8175  iterator(String* from, int offset)
8176      : content_(from->GetFlatContent()), offset_(offset) {}
8177  String::FlatContent content_;
8178  int offset_;
8179};
8180
8181
8182String::SubStringRange::iterator String::SubStringRange::begin() {
8183  return String::SubStringRange::iterator(string_, first_);
8184}
8185
8186
8187String::SubStringRange::iterator String::SubStringRange::end() {
8188  return String::SubStringRange::iterator(string_, first_ + length_);
8189}
8190
8191
8192// Predictably converts HeapObject* or Address to uint32 by calculating
8193// offset of the address in respective MemoryChunk.
8194static inline uint32_t ObjectAddressForHashing(void* object) {
8195  uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
8196  return value & MemoryChunk::kAlignmentMask;
8197}
8198
8199static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
8200                                           Handle<Object> value) {
8201  Handle<Object> key = isolate->factory()->Uint32ToString(index);
8202  Handle<FixedArray> entry_storage =
8203      isolate->factory()->NewUninitializedFixedArray(2);
8204  {
8205    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
8206    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
8207  }
8208  return isolate->factory()->NewJSArrayWithElements(entry_storage,
8209                                                    FAST_ELEMENTS, 2);
8210}
8211
8212static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Name> key,
8213                                           Handle<Object> value) {
8214  Handle<FixedArray> entry_storage =
8215      isolate->factory()->NewUninitializedFixedArray(2);
8216  {
8217    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
8218    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
8219  }
8220  return isolate->factory()->NewJSArrayWithElements(entry_storage,
8221                                                    FAST_ELEMENTS, 2);
8222}
8223
8224ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
8225ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)
8226
8227ACCESSORS(JSArrayIterator, object, Object, kIteratedObjectOffset)
8228ACCESSORS(JSArrayIterator, index, Object, kNextIndexOffset)
8229ACCESSORS(JSArrayIterator, object_map, Object, kIteratedObjectMapOffset)
8230
8231ACCESSORS(JSAsyncFromSyncIterator, sync_iterator, JSReceiver,
8232          kSyncIteratorOffset)
8233
8234ACCESSORS(JSStringIterator, string, String, kStringOffset)
8235SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)
8236
8237#undef INT_ACCESSORS
8238#undef ACCESSORS
8239#undef ACCESSORS_CHECKED
8240#undef ACCESSORS_CHECKED2
8241#undef SMI_ACCESSORS
8242#undef SYNCHRONIZED_SMI_ACCESSORS
8243#undef NOBARRIER_SMI_ACCESSORS
8244#undef BOOL_GETTER
8245#undef BOOL_ACCESSORS
8246#undef FIELD_ADDR
8247#undef FIELD_ADDR_CONST
8248#undef READ_FIELD
8249#undef NOBARRIER_READ_FIELD
8250#undef WRITE_FIELD
8251#undef NOBARRIER_WRITE_FIELD
8252#undef WRITE_BARRIER
8253#undef CONDITIONAL_WRITE_BARRIER
8254#undef READ_DOUBLE_FIELD
8255#undef WRITE_DOUBLE_FIELD
8256#undef READ_INT_FIELD
8257#undef WRITE_INT_FIELD
8258#undef READ_INTPTR_FIELD
8259#undef WRITE_INTPTR_FIELD
8260#undef READ_UINT8_FIELD
8261#undef WRITE_UINT8_FIELD
8262#undef READ_INT8_FIELD
8263#undef WRITE_INT8_FIELD
8264#undef READ_UINT16_FIELD
8265#undef WRITE_UINT16_FIELD
8266#undef READ_INT16_FIELD
8267#undef WRITE_INT16_FIELD
8268#undef READ_UINT32_FIELD
8269#undef WRITE_UINT32_FIELD
8270#undef READ_INT32_FIELD
8271#undef WRITE_INT32_FIELD
8272#undef READ_FLOAT_FIELD
8273#undef WRITE_FLOAT_FIELD
8274#undef READ_UINT64_FIELD
8275#undef WRITE_UINT64_FIELD
8276#undef READ_INT64_FIELD
8277#undef WRITE_INT64_FIELD
8278#undef READ_BYTE_FIELD
8279#undef WRITE_BYTE_FIELD
8280#undef NOBARRIER_READ_BYTE_FIELD
8281#undef NOBARRIER_WRITE_BYTE_FIELD
8282
8283}  // namespace internal
8284}  // namespace v8
8285
8286#endif  // V8_OBJECTS_INL_H_
8287