1// Copyright 2013 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "accessors.h"
31#include "api.h"
32#include "arguments.h"
33#include "bootstrapper.h"
34#include "codegen.h"
35#include "debug.h"
36#include "deoptimizer.h"
37#include "date.h"
38#include "elements.h"
39#include "execution.h"
40#include "full-codegen.h"
41#include "hydrogen.h"
42#include "isolate-inl.h"
43#include "objects-inl.h"
44#include "objects-visiting.h"
45#include "objects-visiting-inl.h"
46#include "macro-assembler.h"
47#include "mark-compact.h"
48#include "safepoint-table.h"
49#include "string-stream.h"
50#include "utils.h"
51
52#ifdef ENABLE_DISASSEMBLER
53#include "disasm.h"
54#include "disassembler.h"
55#endif
56
57namespace v8 {
58namespace internal {
59
60
61MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
62                                                  Object* value) {
63  Object* result;
64  { MaybeObject* maybe_result =
65        constructor->GetHeap()->AllocateJSObject(constructor);
66    if (!maybe_result->ToObject(&result)) return maybe_result;
67  }
68  JSValue::cast(result)->set_value(value);
69  return result;
70}
71
72
73MaybeObject* Object::ToObject(Context* native_context) {
74  if (IsNumber()) {
75    return CreateJSValue(native_context->number_function(), this);
76  } else if (IsBoolean()) {
77    return CreateJSValue(native_context->boolean_function(), this);
78  } else if (IsString()) {
79    return CreateJSValue(native_context->string_function(), this);
80  }
81  ASSERT(IsJSObject());
82  return this;
83}
84
85
86MaybeObject* Object::ToObject() {
87  if (IsJSReceiver()) {
88    return this;
89  } else if (IsNumber()) {
90    Isolate* isolate = Isolate::Current();
91    Context* native_context = isolate->context()->native_context();
92    return CreateJSValue(native_context->number_function(), this);
93  } else if (IsBoolean()) {
94    Isolate* isolate = HeapObject::cast(this)->GetIsolate();
95    Context* native_context = isolate->context()->native_context();
96    return CreateJSValue(native_context->boolean_function(), this);
97  } else if (IsString()) {
98    Isolate* isolate = HeapObject::cast(this)->GetIsolate();
99    Context* native_context = isolate->context()->native_context();
100    return CreateJSValue(native_context->string_function(), this);
101  } else if (IsSymbol()) {
102    Isolate* isolate = HeapObject::cast(this)->GetIsolate();
103    Context* native_context = isolate->context()->native_context();
104    return CreateJSValue(native_context->symbol_function(), this);
105  }
106
107  // Throw a type error.
108  return Failure::InternalError();
109}
110
111
112bool Object::BooleanValue() {
113  if (IsBoolean()) return IsTrue();
114  if (IsSmi()) return Smi::cast(this)->value() != 0;
115  if (IsUndefined() || IsNull()) return false;
116  if (IsUndetectableObject()) return false;   // Undetectable object is false.
117  if (IsString()) return String::cast(this)->length() != 0;
118  if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
119  return true;
120}
121
122
// Looks up |name| on this value. JS receivers are searched directly;
// primitives start the search at the instance prototype of their wrapper
// function from the native context (number, string, symbol, boolean).
void Object::Lookup(Name* name, LookupResult* result) {
  Object* holder = NULL;
  if (IsJSReceiver()) {
    holder = this;
  } else {
    Context* native_context = result->isolate()->context()->native_context();
    if (IsNumber()) {
      holder = native_context->number_function()->instance_prototype();
    } else if (IsString()) {
      holder = native_context->string_function()->instance_prototype();
    } else if (IsSymbol()) {
      holder = native_context->symbol_function()->instance_prototype();
    } else if (IsBoolean()) {
      holder = native_context->boolean_function()->instance_prototype();
    } else {
      // Unexpected value kind: crash with diagnostic data in registers.
      // NOTE(review): JSReceiver::cast(this) here casts a value that just
      // failed IsJSReceiver() -- presumably only to capture raw map bits
      // for the crash dump; confirm this is intentional.
      Isolate::Current()->PushStackTraceAndDie(
          0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
    }
  }
  ASSERT(holder != NULL);  // Cannot handle null or undefined.
  JSReceiver::cast(holder)->Lookup(name, result);
}
145
146
147MaybeObject* Object::GetPropertyWithReceiver(Object* receiver,
148                                             Name* name,
149                                             PropertyAttributes* attributes) {
150  LookupResult result(name->GetIsolate());
151  Lookup(name, &result);
152  MaybeObject* value = GetProperty(receiver, &result, name, attributes);
153  ASSERT(*attributes <= ABSENT);
154  return value;
155}
156
157
158bool Object::ToInt32(int32_t* value) {
159  if (IsSmi()) {
160    *value = Smi::cast(this)->value();
161    return true;
162  }
163  if (IsHeapNumber()) {
164    double num = HeapNumber::cast(this)->value();
165    if (FastI2D(FastD2I(num)) == num) {
166      *value = FastD2I(num);
167      return true;
168    }
169  }
170  return false;
171}
172
173
174bool Object::ToUint32(uint32_t* value) {
175  if (IsSmi()) {
176    int num = Smi::cast(this)->value();
177    if (num >= 0) {
178      *value = static_cast<uint32_t>(num);
179      return true;
180    }
181  }
182  if (IsHeapNumber()) {
183    double num = HeapNumber::cast(this)->value();
184    if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
185      *value = FastD2UI(num);
186      return true;
187    }
188  }
189  return false;
190}
191
192
193template<typename To>
194static inline To* CheckedCast(void *from) {
195  uintptr_t temp = reinterpret_cast<uintptr_t>(from);
196  ASSERT(temp % sizeof(To) == 0);
197  return reinterpret_cast<To*>(temp);
198}
199
200
201static MaybeObject* PerformCompare(const BitmaskCompareDescriptor& descriptor,
202                                   char* ptr,
203                                   Heap* heap) {
204  uint32_t bitmask = descriptor.bitmask;
205  uint32_t compare_value = descriptor.compare_value;
206  uint32_t value;
207  switch (descriptor.size) {
208    case 1:
209      value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
210      compare_value &= 0xff;
211      bitmask &= 0xff;
212      break;
213    case 2:
214      value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
215      compare_value &= 0xffff;
216      bitmask &= 0xffff;
217      break;
218    case 4:
219      value = *CheckedCast<uint32_t>(ptr);
220      break;
221    default:
222      UNREACHABLE();
223      return NULL;
224  }
225  return heap->ToBoolean((bitmask & value) == (bitmask & compare_value));
226}
227
228
229static MaybeObject* PerformCompare(const PointerCompareDescriptor& descriptor,
230                                   char* ptr,
231                                   Heap* heap) {
232  uintptr_t compare_value =
233      reinterpret_cast<uintptr_t>(descriptor.compare_value);
234  uintptr_t value = *CheckedCast<uintptr_t>(ptr);
235  return heap->ToBoolean(compare_value == value);
236}
237
238
239static MaybeObject* GetPrimitiveValue(
240    const PrimitiveValueDescriptor& descriptor,
241    char* ptr,
242    Heap* heap) {
243  int32_t int32_value = 0;
244  switch (descriptor.data_type) {
245    case kDescriptorInt8Type:
246      int32_value = *CheckedCast<int8_t>(ptr);
247      break;
248    case kDescriptorUint8Type:
249      int32_value = *CheckedCast<uint8_t>(ptr);
250      break;
251    case kDescriptorInt16Type:
252      int32_value = *CheckedCast<int16_t>(ptr);
253      break;
254    case kDescriptorUint16Type:
255      int32_value = *CheckedCast<uint16_t>(ptr);
256      break;
257    case kDescriptorInt32Type:
258      int32_value = *CheckedCast<int32_t>(ptr);
259      break;
260    case kDescriptorUint32Type: {
261      uint32_t value = *CheckedCast<uint32_t>(ptr);
262      return heap->NumberFromUint32(value);
263    }
264    case kDescriptorBoolType: {
265      uint8_t byte = *CheckedCast<uint8_t>(ptr);
266      return heap->ToBoolean(byte & (0x1 << descriptor.bool_offset));
267    }
268    case kDescriptorFloatType: {
269      float value = *CheckedCast<float>(ptr);
270      return heap->NumberFromDouble(value);
271    }
272    case kDescriptorDoubleType: {
273      double value = *CheckedCast<double>(ptr);
274      return heap->NumberFromDouble(value);
275    }
276  }
277  return heap->NumberFromInt32(int32_value);
278}
279
280
// Evaluates a DeclaredAccessorInfo descriptor program against |receiver|.
// Starting from the receiver's raw address, each descriptor op either
// advances the |current| pointer (dereference, byte shift, internal-field
// load) or is a terminal op that produces the result value; the iterator
// guarantees (via ASSERTs) that terminal ops only occur at the end.
static MaybeObject* GetDeclaredAccessorProperty(Object* receiver,
                                                DeclaredAccessorInfo* info,
                                                Isolate* isolate) {
  char* current = reinterpret_cast<char*>(receiver);
  DeclaredAccessorDescriptorIterator iterator(info->descriptor());
  while (true) {
    const DeclaredAccessorDescriptorData* data = iterator.Next();
    switch (data->type) {
      case kDescriptorReturnObject: {
        ASSERT(iterator.Complete());
        // Terminal: one more dereference yields the Object* result.
        current = *CheckedCast<char*>(current);
        return *CheckedCast<Object*>(current);
      }
      case kDescriptorPointerDereference:
        ASSERT(!iterator.Complete());
        current = *reinterpret_cast<char**>(current);
        break;
      case kDescriptorPointerShift:
        ASSERT(!iterator.Complete());
        current += data->pointer_shift_descriptor.byte_offset;
        break;
      case kDescriptorObjectDereference: {
        ASSERT(!iterator.Complete());
        // Load a Smi-encoded pointer out of a JSObject internal field and
        // continue walking from its raw bits.
        Object* object = CheckedCast<Object>(current);
        int field = data->object_dereference_descriptor.internal_field;
        Object* smi = JSObject::cast(object)->GetInternalField(field);
        ASSERT(smi->IsSmi());
        current = reinterpret_cast<char*>(smi);
        break;
      }
      case kDescriptorBitmaskCompare:
        // Terminal: masked compare producing a boolean.
        ASSERT(iterator.Complete());
        return PerformCompare(data->bitmask_compare_descriptor,
                              current,
                              isolate->heap());
      case kDescriptorPointerCompare:
        // Terminal: pointer equality producing a boolean.
        ASSERT(iterator.Complete());
        return PerformCompare(data->pointer_compare_descriptor,
                              current,
                              isolate->heap());
      case kDescriptorPrimitiveValue:
        // Terminal: read and box a primitive value.
        ASSERT(iterator.Complete());
        return GetPrimitiveValue(data->primitive_value_descriptor,
                                 current,
                                 isolate->heap());
    }
  }
  // Every descriptor sequence ends in a terminal op that returns above.
  UNREACHABLE();
  return NULL;
}
331
332
// Loads a property whose backing |structure| is a callback of one of
// three kinds: a Foreign wrapping a raw AccessorDescriptor, an
// AccessorInfo (declared or executable API accessor), or an AccessorPair
// installed via __defineGetter__.
MaybeObject* JSObject::GetPropertyWithCallback(Object* receiver,
                                               Object* structure,
                                               Name* name) {
  Isolate* isolate = name->GetIsolate();
  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks.  Eventually foreign
  // callbacks should be phased out.
  if (structure->IsForeign()) {
    AccessorDescriptor* callback =
        reinterpret_cast<AccessorDescriptor*>(
            Foreign::cast(structure)->foreign_address());
    MaybeObject* value = (callback->getter)(receiver, callback->data);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return value;
  }

  // api style callbacks.
  if (structure->IsAccessorInfo()) {
    if (!AccessorInfo::cast(structure)->IsCompatibleReceiver(receiver)) {
      // The receiver does not match the accessor's signature: throw.
      Handle<Object> name_handle(name, isolate);
      Handle<Object> receiver_handle(receiver, isolate);
      Handle<Object> args[2] = { name_handle, receiver_handle };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
                                           HandleVector(args,
                                                        ARRAY_SIZE(args)));
      return isolate->Throw(*error);
    }
    // TODO(rossberg): Handling symbols in the API requires changing the API,
    // so we do not support it for now.
    if (name->IsSymbol()) return isolate->heap()->undefined_value();
    if (structure->IsDeclaredAccessorInfo()) {
      // Declared accessors are evaluated by interpreting their descriptor
      // rather than calling out to the embedder.
      return GetDeclaredAccessorProperty(receiver,
                                         DeclaredAccessorInfo::cast(structure),
                                         isolate);
    }
    ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(structure);
    Object* fun_obj = data->getter();
    v8::AccessorGetter call_fun = v8::ToCData<v8::AccessorGetter>(fun_obj);
    // A NULL getter means the property reads as undefined.
    if (call_fun == NULL) return isolate->heap()->undefined_value();
    HandleScope scope(isolate);
    JSObject* self = JSObject::cast(receiver);
    Handle<String> key(String::cast(name));
    LOG(isolate, ApiNamedPropertyAccess("load", self, name));
    PropertyCallbackArguments args(isolate, data->data(), self, this);
    v8::Handle<v8::Value> result =
        args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (result.IsEmpty()) {
      return isolate->heap()->undefined_value();
    }
    Object* return_value = *v8::Utils::OpenHandle(*result);
    // Sanity-check what the embedder handed back before exposing it.
    return_value->VerifyApiCallResultType();
    return return_value;
  }

  // __defineGetter__ callback
  if (structure->IsAccessorPair()) {
    Object* getter = AccessorPair::cast(structure)->getter();
    if (getter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
    }
    // Getter is not a function.
    return isolate->heap()->undefined_value();
  }

  UNREACHABLE();
  return NULL;
}
403
404
405MaybeObject* JSProxy::GetPropertyWithHandler(Object* receiver_raw,
406                                             Name* name_raw) {
407  Isolate* isolate = GetIsolate();
408  HandleScope scope(isolate);
409  Handle<Object> receiver(receiver_raw, isolate);
410  Handle<Object> name(name_raw, isolate);
411
412  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
413  if (name->IsSymbol()) return isolate->heap()->undefined_value();
414
415  Handle<Object> args[] = { receiver, name };
416  Handle<Object> result = CallTrap(
417    "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
418  if (isolate->has_pending_exception()) return Failure::Exception();
419
420  return *result;
421}
422
423
424Handle<Object> Object::GetProperty(Handle<Object> object, Handle<Name> name) {
425  // TODO(rossberg): The index test should not be here but in the GetProperty
426  // method (or somewhere else entirely). Needs more global clean-up.
427  uint32_t index;
428  if (name->AsArrayIndex(&index))
429    return GetElement(object, index);
430  Isolate* isolate = object->IsHeapObject()
431      ? Handle<HeapObject>::cast(object)->GetIsolate()
432      : Isolate::Current();
433  CALL_HEAP_FUNCTION(isolate, object->GetProperty(*name), Object);
434}
435
436
437Handle<Object> Object::GetElement(Handle<Object> object, uint32_t index) {
438  Isolate* isolate = object->IsHeapObject()
439      ? Handle<HeapObject>::cast(object)->GetIsolate()
440      : Isolate::Current();
441  CALL_HEAP_FUNCTION(isolate, object->GetElement(index), Object);
442}
443
444
445MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
446                                            uint32_t index) {
447  String* name;
448  MaybeObject* maybe = GetHeap()->Uint32ToString(index);
449  if (!maybe->To<String>(&name)) return maybe;
450  return GetPropertyWithHandler(receiver, name);
451}
452
453
454MaybeObject* JSProxy::SetElementWithHandler(JSReceiver* receiver,
455                                            uint32_t index,
456                                            Object* value,
457                                            StrictModeFlag strict_mode) {
458  String* name;
459  MaybeObject* maybe = GetHeap()->Uint32ToString(index);
460  if (!maybe->To<String>(&name)) return maybe;
461  return SetPropertyWithHandler(receiver, name, value, NONE, strict_mode);
462}
463
464
465bool JSProxy::HasElementWithHandler(uint32_t index) {
466  String* name;
467  MaybeObject* maybe = GetHeap()->Uint32ToString(index);
468  if (!maybe->To<String>(&name)) return maybe;
469  return HasPropertyWithHandler(name);
470}
471
472
// Invokes a JavaScript getter |getter| with |receiver| as the receiver
// and no arguments. Returns Failure::Exception() if the call threw.
MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver,
                                                  JSReceiver* getter) {
  Isolate* isolate = getter->GetIsolate();
  HandleScope scope(isolate);
  Handle<JSReceiver> fun(getter);
  Handle<Object> self(receiver, isolate);
#ifdef ENABLE_DEBUGGER_SUPPORT
  Debug* debug = isolate->debug();
  // Handle stepping into a getter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && fun->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(fun), Handle<Object>::null(), 0, false);
  }
#endif

  bool has_pending_exception;
  Handle<Object> result =
      Execution::Call(fun, self, 0, NULL, &has_pending_exception, true);
  // Check for pending exception and return the result.
  if (has_pending_exception) return Failure::Exception();
  return *result;
}
496
497
// Only deal with CALLBACKS and INTERCEPTOR
// Called after a security check on this object failed. Only ALL_CAN_READ
// API accessors may still be served; data properties and interceptors
// recurse to look for such an accessor further up the prototype chain.
// If nothing accessible is found, the failed access is reported and
// undefined is returned with *attributes == ABSENT.
MaybeObject* JSObject::GetPropertyWithFailedAccessCheck(
    Object* receiver,
    LookupResult* result,
    Name* name,
    PropertyAttributes* attributes) {
  if (result->IsProperty()) {
    switch (result->type()) {
      case CALLBACKS: {
        // Only allow API accessors.
        Object* obj = result->GetCallbackObject();
        if (obj->IsAccessorInfo()) {
          AccessorInfo* info = AccessorInfo::cast(obj);
          if (info->all_can_read()) {
            *attributes = result->GetAttributes();
            return result->holder()->GetPropertyWithCallback(
                receiver, result->GetCallbackObject(), name);
          }
        }
        break;
      }
      case NORMAL:
      case FIELD:
      case CONSTANT: {
        // Search ALL_CAN_READ accessors in prototype chain.
        LookupResult r(GetIsolate());
        result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
        if (r.IsProperty()) {
          return GetPropertyWithFailedAccessCheck(receiver,
                                                  &r,
                                                  name,
                                                  attributes);
        }
        break;
      }
      case INTERCEPTOR: {
        // If the object has an interceptor, try real named properties.
        // No access check in GetPropertyAttributeWithInterceptor.
        LookupResult r(GetIsolate());
        result->holder()->LookupRealNamedProperty(name, &r);
        if (r.IsProperty()) {
          return GetPropertyWithFailedAccessCheck(receiver,
                                                  &r,
                                                  name,
                                                  attributes);
        }
        break;
      }
      default:
        UNREACHABLE();
    }
  }

  // No accessible property found.
  *attributes = ABSENT;
  Heap* heap = name->GetHeap();
  Isolate* isolate = heap->isolate();
  // Notify the embedder's failed-access callback; it may schedule an
  // exception, which is propagated here.
  isolate->ReportFailedAccessCheck(this, v8::ACCESS_GET);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return heap->undefined_value();
}
559
560
// Attribute lookup counterpart of GetPropertyWithFailedAccessCheck: after
// a failed security check, only ALL_CAN_READ API accessors may reveal
// their attributes. |continue_search| controls whether the prototype
// chain may be consulted. Returns ABSENT when nothing is accessible.
PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
    Object* receiver,
    LookupResult* result,
    Name* name,
    bool continue_search) {
  if (result->IsProperty()) {
    switch (result->type()) {
      case CALLBACKS: {
        // Only allow API accessors.
        Object* obj = result->GetCallbackObject();
        if (obj->IsAccessorInfo()) {
          AccessorInfo* info = AccessorInfo::cast(obj);
          if (info->all_can_read()) {
            return result->GetAttributes();
          }
        }
        break;
      }

      case NORMAL:
      case FIELD:
      case CONSTANT: {
        if (!continue_search) break;
        // Search ALL_CAN_READ accessors in prototype chain.
        LookupResult r(GetIsolate());
        result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
        if (r.IsProperty()) {
          return GetPropertyAttributeWithFailedAccessCheck(receiver,
                                                           &r,
                                                           name,
                                                           continue_search);
        }
        break;
      }

      case INTERCEPTOR: {
        // If the object has an interceptor, try real named properties.
        // No access check in GetPropertyAttributeWithInterceptor.
        LookupResult r(GetIsolate());
        if (continue_search) {
          result->holder()->LookupRealNamedProperty(name, &r);
        } else {
          result->holder()->LocalLookupRealNamedProperty(name, &r);
        }
        if (!r.IsFound()) break;
        return GetPropertyAttributeWithFailedAccessCheck(receiver,
                                                         &r,
                                                         name,
                                                         continue_search);
      }

      case HANDLER:
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
    }
  }

  // Nothing accessible: notify the embedder and report absence.
  GetIsolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
  return ABSENT;
}
622
623
624Object* JSObject::GetNormalizedProperty(LookupResult* result) {
625  ASSERT(!HasFastProperties());
626  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
627  if (IsGlobalObject()) {
628    value = PropertyCell::cast(value)->value();
629  }
630  ASSERT(!value->IsPropertyCell() && !value->IsCell());
631  return value;
632}
633
634
// Handlified wrapper around the raw SetNormalizedProperty(result, value);
// CALL_HEAP_FUNCTION retries the operation after GC on allocation failure.
Handle<Object> JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                               LookupResult* result,
                                               Handle<Object> value) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->SetNormalizedProperty(result, *value),
                     Object);
}
642
643
644MaybeObject* JSObject::SetNormalizedProperty(LookupResult* result,
645                                             Object* value) {
646  ASSERT(!HasFastProperties());
647  if (IsGlobalObject()) {
648    PropertyCell* cell = PropertyCell::cast(
649        property_dictionary()->ValueAt(result->GetDictionaryEntry()));
650    MaybeObject* maybe_type = cell->SetValueInferType(value);
651    if (maybe_type->IsFailure()) return maybe_type;
652  } else {
653    property_dictionary()->ValueAtPut(result->GetDictionaryEntry(), value);
654  }
655  return value;
656}
657
658
// Handlified wrapper around SetNormalizedProperty(name, value, details);
// CALL_HEAP_FUNCTION retries the operation after GC on allocation failure.
Handle<Object> JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                               Handle<Name> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->SetNormalizedProperty(*key, *value, details),
                     Object);
}
667
668
// Adds or updates a dictionary-mode property |name| with |value| and
// |details|. Missing properties are added (through a fresh property cell
// on global objects); existing ones are updated in place, preserving the
// enumeration index unless the slot was previously deleted.
MaybeObject* JSObject::SetNormalizedProperty(Name* name,
                                             Object* value,
                                             PropertyDetails details) {
  ASSERT(!HasFastProperties());
  int entry = property_dictionary()->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    Object* store_value = value;
    if (IsGlobalObject()) {
      // Global properties are stored indirectly in a property cell.
      Heap* heap = name->GetHeap();
      MaybeObject* maybe_store_value = heap->AllocatePropertyCell(value);
      if (!maybe_store_value->ToObject(&store_value)) return maybe_store_value;
    }
    Object* dict;
    { MaybeObject* maybe_dict =
          property_dictionary()->Add(name, store_value, details);
      if (!maybe_dict->ToObject(&dict)) return maybe_dict;
    }
    // Add() may have grown/reallocated the dictionary; re-install it.
    set_properties(NameDictionary::cast(dict));
    return value;
  }

  PropertyDetails original_details = property_dictionary()->DetailsAt(entry);
  int enumeration_index;
  // Preserve the enumeration index unless the property was deleted.
  if (original_details.IsDeleted()) {
    enumeration_index = property_dictionary()->NextEnumerationIndex();
    property_dictionary()->SetNextEnumerationIndex(enumeration_index + 1);
  } else {
    enumeration_index = original_details.dictionary_index();
    ASSERT(enumeration_index > 0);
  }

  details = PropertyDetails(
      details.attributes(), details.type(), enumeration_index);

  if (IsGlobalObject()) {
    // Update through the property cell so its type stays consistent.
    PropertyCell* cell =
        PropertyCell::cast(property_dictionary()->ValueAt(entry));
    MaybeObject* maybe_type = cell->SetValueInferType(value);
    if (maybe_type->IsFailure()) return maybe_type;
    // Please note we have to update the property details.
    property_dictionary()->DetailsAtPut(entry, details);
  } else {
    property_dictionary()->SetEntry(entry, name, value, details);
  }
  return value;
}
716
717
// TODO(mstarzinger): Temporary wrapper until target is handlified.
// Shrinks |dict| after deletions; CALL_HEAP_FUNCTION retries the
// underlying allocation after GC on failure.
Handle<NameDictionary> NameDictionaryShrink(Handle<NameDictionary> dict,
                                            Handle<Name> name) {
  CALL_HEAP_FUNCTION(dict->GetIsolate(), dict->Shrink(*name), NameDictionary);
}
723
724
// Handlified wrapper for PropertyCell::SetValueInferType; the void variant
// of CALL_HEAP_FUNCTION retries after GC and discards the result.
static void CellSetValueInferType(Handle<PropertyCell> cell,
                                  Handle<Object> value) {
  CALL_HEAP_FUNCTION_VOID(cell->GetIsolate(), cell->SetValueInferType(*value));
}
729
730
// Deletes a dictionary-mode property. On global objects the slot is a
// property cell that gets the hole stored into it (and, for forced
// deletion of DontDelete properties, a map change to invalidate ICs);
// elsewhere the dictionary entry is removed and the dictionary shrunk.
// Returns true/false per the usual [[Delete]] contract.
Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
                                                  Handle<Name> name,
                                                  DeleteMode mode) {
  ASSERT(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dictionary(object->property_dictionary());
  int entry = dictionary->FindEntry(*name);
  if (entry != NameDictionary::kNotFound) {
    // If we have a global object set the cell to the hole.
    if (object->IsGlobalObject()) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.IsDontDelete()) {
        if (mode != FORCE_DELETION) return isolate->factory()->false_value();
        // When forced to delete global properties, we have to make a
        // map change to invalidate any ICs that think they can load
        // from the DontDelete cell without checking if it contains
        // the hole value.
        Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
        ASSERT(new_map->is_dictionary_map());
        object->set_map(*new_map);
      }
      Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
      CellSetValueInferType(cell, isolate->factory()->the_hole_value());
      // Mark the entry deleted so a later re-add gets a fresh enum index.
      dictionary->DetailsAtPut(entry, details.AsDeleted());
    } else {
      Handle<Object> deleted(dictionary->DeleteProperty(entry, mode), isolate);
      if (*deleted == isolate->heap()->true_value()) {
        // Reclaim space if the deletion actually happened.
        Handle<NameDictionary> new_properties =
            NameDictionaryShrink(dictionary, name);
        object->set_properties(*new_properties);
      }
      return deleted;
    }
  }
  // Absent properties (and holed global cells) report successful deletion.
  return isolate->factory()->true_value();
}
767
768
769bool JSObject::IsDirty() {
770  Object* cons_obj = map()->constructor();
771  if (!cons_obj->IsJSFunction())
772    return true;
773  JSFunction* fun = JSFunction::cast(cons_obj);
774  if (!fun->shared()->IsApiFunction())
775    return true;
776  // If the object is fully fast case and has the same map it was
777  // created with then no changes can have been made to it.
778  return map() != fun->initial_map()
779      || !HasFastObjectElements()
780      || !HasFastProperties();
781}
782
783
784Handle<Object> Object::GetProperty(Handle<Object> object,
785                                   Handle<Object> receiver,
786                                   LookupResult* result,
787                                   Handle<Name> key,
788                                   PropertyAttributes* attributes) {
789  Isolate* isolate = object->IsHeapObject()
790      ? Handle<HeapObject>::cast(object)->GetIsolate()
791      : Isolate::Current();
792  CALL_HEAP_FUNCTION(
793      isolate,
794      object->GetProperty(*receiver, result, *key, attributes),
795      Object);
796}
797
798
799MaybeObject* Object::GetPropertyOrFail(Handle<Object> object,
800                                       Handle<Object> receiver,
801                                       LookupResult* result,
802                                       Handle<Name> key,
803                                       PropertyAttributes* attributes) {
804  Isolate* isolate = object->IsHeapObject()
805      ? Handle<HeapObject>::cast(object)->GetIsolate()
806      : Isolate::Current();
807  CALL_HEAP_FUNCTION_PASS_EXCEPTION(
808      isolate,
809      object->GetProperty(*receiver, result, *key, attributes));
810}
811
812
// Raw (unhandlified) property load.  |result| describes a lookup already
// performed by the caller; this routine enforces access checks on every
// object from the receiver up to the holder, then loads the value
// according to the kind of property found.  Callbacks and interceptors
// may run arbitrary JS, hence the no-context-change assertion below.
MaybeObject* Object::GetProperty(Object* receiver,
                                 LookupResult* result,
                                 Name* name,
                                 PropertyAttributes* attributes) {
  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc;
  Isolate* isolate = name->GetIsolate();
  Heap* heap = isolate->heap();

  // Traverse the prototype chain from the current object (this) to
  // the holder and check for access rights. This avoids traversing the
  // objects more than once in case of interceptors, because the
  // holder will always be the interceptor holder and the search may
  // only continue with a current object just after the interceptor
  // holder in the prototype chain.
  // Proxy handlers do not use the proxy's prototype, so we can skip this.
  if (!result->IsHandler()) {
    // Walk up to the holder, or to null when the property is absent.
    Object* last = result->IsProperty()
        ? result->holder()
        : Object::cast(heap->null_value());
    ASSERT(this != this->GetPrototype(isolate));
    for (Object* current = this;
         true;
         current = current->GetPrototype(isolate)) {
      if (current->IsAccessCheckNeeded()) {
        // Check if we're allowed to read from the current object. Note
        // that even though we may not actually end up loading the named
        // property from the current object, we still check that we have
        // access to it.
        JSObject* checked = JSObject::cast(current);
        if (!heap->isolate()->MayNamedAccess(checked, name, v8::ACCESS_GET)) {
          return checked->GetPropertyWithFailedAccessCheck(receiver,
                                                           result,
                                                           name,
                                                           attributes);
        }
      }
      // Stop traversing the chain once we reach the last object in the
      // chain; either the holder of the result or null in case of an
      // absent property.
      if (current == last) break;
    }
  }

  if (!result->IsProperty()) {
    *attributes = ABSENT;
    return heap->undefined_value();
  }
  *attributes = result->GetAttributes();
  Object* value;
  switch (result->type()) {
    case NORMAL:
      // Property stored in the holder's property dictionary.
      value = result->holder()->GetNormalizedProperty(result);
      ASSERT(!value->IsTheHole() || result->IsReadOnly());
      return value->IsTheHole() ? heap->undefined_value() : value;
    case FIELD: {
      // Property stored as an in-object or backing-store field.
      MaybeObject* maybe_result = result->holder()->FastPropertyAt(
          result->representation(),
          result->GetFieldIndex().field_index());
      if (!maybe_result->To(&value)) return maybe_result;
      ASSERT(!value->IsTheHole() || result->IsReadOnly());
      return value->IsTheHole() ? heap->undefined_value() : value;
    }
    case CONSTANT:
      return result->GetConstant();
    case CALLBACKS:
      // Accessor property: may invoke a JS or native getter.
      return result->holder()->GetPropertyWithCallback(
          receiver, result->GetCallbackObject(), name);
    case HANDLER:
      // Harmony proxy: delegate the load to the proxy's handler.
      return result->proxy()->GetPropertyWithHandler(receiver, name);
    case INTERCEPTOR:
      return result->holder()->GetPropertyWithInterceptor(
          receiver, name, attributes);
    case TRANSITION:
    case NONEXISTENT:
      // Lookup results of these kinds never reach a value load.
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return NULL;
}
895
896
// Element load with prototype-chain walk.  Starting at |this|, walks up
// the chain until an element at |index| is found or null is reached.
// Primitive holders are replaced by the prototype of their wrapper
// function so the walk always proceeds through JSObjects (or bails out
// via a proxy handler).
MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
  // Smis carry no isolate pointer, so fall back to the current isolate.
  Isolate* isolate = IsSmi()
      ? Isolate::Current()
      : HeapObject::cast(this)->GetIsolate();
  Heap* heap = isolate->heap();
  Object* holder = this;

  // Iterate up the prototype chain until an element is found or the null
  // prototype is encountered.
  for (holder = this;
       holder != heap->null_value();
       holder = holder->GetPrototype(isolate)) {
    if (!holder->IsJSObject()) {
      // Replace a primitive holder by its wrapper's instance prototype.
      Context* native_context = isolate->context()->native_context();
      if (holder->IsNumber()) {
        holder = native_context->number_function()->instance_prototype();
      } else if (holder->IsString()) {
        holder = native_context->string_function()->instance_prototype();
      } else if (holder->IsSymbol()) {
        holder = native_context->symbol_function()->instance_prototype();
      } else if (holder->IsBoolean()) {
        holder = native_context->boolean_function()->instance_prototype();
      } else if (holder->IsJSProxy()) {
        // Proxies answer the lookup themselves; no further chain walk.
        return JSProxy::cast(holder)->GetElementWithHandler(receiver, index);
      } else {
        // Undefined and null have no indexed properties.
        ASSERT(holder->IsUndefined() || holder->IsNull());
        return heap->undefined_value();
      }
    }

    // Inline the case for JSObjects. Doing so significantly improves the
    // performance of fetching elements where checking the prototype chain is
    // necessary.
    JSObject* js_object = JSObject::cast(holder);

    // Check access rights if needed.
    if (js_object->IsAccessCheckNeeded()) {
      Isolate* isolate = heap->isolate();
      if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
        isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
        RETURN_IF_SCHEDULED_EXCEPTION(isolate);
        return heap->undefined_value();
      }
    }

    if (js_object->HasIndexedInterceptor()) {
      return js_object->GetElementWithInterceptor(receiver, index);
    }

    if (js_object->elements() != heap->empty_fixed_array()) {
      // The hole means "not present on this object": keep walking the chain.
      MaybeObject* result = js_object->GetElementsAccessor()->Get(
          receiver, js_object, index);
      if (result != heap->the_hole_value()) return result;
    }
  }

  return heap->undefined_value();
}
956
957
958Object* Object::GetPrototype(Isolate* isolate) {
959  if (IsSmi()) {
960    Context* context = isolate->context()->native_context();
961    return context->number_function()->instance_prototype();
962  }
963
964  HeapObject* heap_object = HeapObject::cast(this);
965
966  // The object is either a number, a string, a boolean,
967  // a real JS object, or a Harmony proxy.
968  if (heap_object->IsJSReceiver()) {
969    return heap_object->map()->prototype();
970  }
971  Context* context = isolate->context()->native_context();
972
973  if (heap_object->IsHeapNumber()) {
974    return context->number_function()->instance_prototype();
975  }
976  if (heap_object->IsString()) {
977    return context->string_function()->instance_prototype();
978  }
979  if (heap_object->IsSymbol()) {
980    return context->symbol_function()->instance_prototype();
981  }
982  if (heap_object->IsBoolean()) {
983    return context->boolean_function()->instance_prototype();
984  } else {
985    return isolate->heap()->null_value();
986  }
987}
988
989
990MaybeObject* Object::GetHash(CreationFlag flag) {
991  // The object is either a number, a name, an odd-ball,
992  // a real JS object, or a Harmony proxy.
993  if (IsNumber()) {
994    uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
995    return Smi::FromInt(hash & Smi::kMaxValue);
996  }
997  if (IsName()) {
998    uint32_t hash = Name::cast(this)->Hash();
999    return Smi::FromInt(hash);
1000  }
1001  if (IsOddball()) {
1002    uint32_t hash = Oddball::cast(this)->to_string()->Hash();
1003    return Smi::FromInt(hash);
1004  }
1005  if (IsJSReceiver()) {
1006    return JSReceiver::cast(this)->GetIdentityHash(flag);
1007  }
1008
1009  UNREACHABLE();
1010  return Smi::FromInt(0);
1011}
1012
1013
1014bool Object::SameValue(Object* other) {
1015  if (other == this) return true;
1016
1017  // The object is either a number, a name, an odd-ball,
1018  // a real JS object, or a Harmony proxy.
1019  if (IsNumber() && other->IsNumber()) {
1020    double this_value = Number();
1021    double other_value = other->Number();
1022    return (this_value == other_value) ||
1023        (std::isnan(this_value) && std::isnan(other_value));
1024  }
1025  if (IsString() && other->IsString()) {
1026    return String::cast(this)->Equals(String::cast(other));
1027  }
1028  return false;
1029}
1030
1031
1032void Object::ShortPrint(FILE* out) {
1033  HeapStringAllocator allocator;
1034  StringStream accumulator(&allocator);
1035  ShortPrint(&accumulator);
1036  accumulator.OutputToFile(out);
1037}
1038
1039
1040void Object::ShortPrint(StringStream* accumulator) {
1041  if (IsSmi()) {
1042    Smi::cast(this)->SmiPrint(accumulator);
1043  } else if (IsFailure()) {
1044    Failure::cast(this)->FailurePrint(accumulator);
1045  } else {
1046    HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
1047  }
1048}
1049
1050
// Prints the Smi's integer value to |out|.
void Smi::SmiPrint(FILE* out) {
  PrintF(out, "%d", value());
}
1054
1055
// Appends the Smi's integer value to the accumulator.
void Smi::SmiPrint(StringStream* accumulator) {
  accumulator->Add("%d", value());
}
1059
1060
// Appends the failure payload as "Failure(<pointer>)" to the accumulator.
void Failure::FailurePrint(StringStream* accumulator) {
  accumulator->Add("Failure(%p)", reinterpret_cast<void*>(value()));
}
1064
1065
// Prints the failure payload as "Failure(<pointer>)" to |out|.
void Failure::FailurePrint(FILE* out) {
  PrintF(out, "Failure(%p)", reinterpret_cast<void*>(value()));
}
1069
1070
1071// Should a word be prefixed by 'a' or 'an' in order to read naturally in
1072// English?  Returns false for non-ASCII or words that don't start with
1073// a capital letter.  The a/an rule follows pronunciation in English.
1074// We don't use the BBC's overcorrect "an historic occasion" though if
1075// you speak a dialect you may well say "an 'istoric occasion".
1076static bool AnWord(String* str) {
1077  if (str->length() == 0) return false;  // A nothing.
1078  int c0 = str->Get(0);
1079  int c1 = str->length() > 1 ? str->Get(1) : 0;
1080  if (c0 == 'U') {
1081    if (c1 > 'Z') {
1082      return true;  // An Umpire, but a UTF8String, a U.
1083    }
1084  } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
1085    return true;    // An Ape, an ABCBook.
1086  } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
1087           (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
1088            c0 == 'S' || c0 == 'X')) {
1089    return true;    // An MP3File, an M.
1090  }
1091  return false;
1092}
1093
1094
// Flattens a cons string in place: allocates a fresh sequential string,
// copies both halves into it, and rewrites the cons cell so its first
// part is the flat result and its second part is empty.  Non-cons
// representations are returned unchanged.
MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
#ifdef DEBUG
  // Do not attempt to flatten in debug mode when allocation is not
  // allowed.  This is to avoid an assertion failure when allocating.
  // Flattening strings is the only case where we always allow
  // allocation because no GC is performed if the allocation fails.
  if (!AllowHeapAllocation::IsAllowed()) return this;
#endif

  Heap* heap = GetHeap();
  switch (StringShape(this).representation_tag()) {
    case kConsStringTag: {
      ConsString* cs = ConsString::cast(this);
      if (cs->second()->length() == 0) {
        // Already effectively flat: the first part is the whole string.
        return cs->first();
      }
      // There's little point in putting the flat string in new space if the
      // cons string is in old space.  It can never get GCed until there is
      // an old space GC.
      PretenureFlag tenure = heap->InNewSpace(this) ? pretenure : TENURED;
      int len = length();
      Object* object;
      String* result;
      if (IsOneByteRepresentation()) {
        { MaybeObject* maybe_object =
              heap->AllocateRawOneByteString(len, tenure);
          // Allocation failure propagates; no GC retry happens here.
          if (!maybe_object->ToObject(&object)) return maybe_object;
        }
        result = String::cast(object);
        String* first = cs->first();
        int first_length = first->length();
        uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
        // Copy the two halves back to back into the new backing store.
        WriteToFlat(first, dest, 0, first_length);
        String* second = cs->second();
        WriteToFlat(second,
                    dest + first_length,
                    0,
                    len - first_length);
      } else {
        { MaybeObject* maybe_object =
              heap->AllocateRawTwoByteString(len, tenure);
          if (!maybe_object->ToObject(&object)) return maybe_object;
        }
        result = String::cast(object);
        uc16* dest = SeqTwoByteString::cast(result)->GetChars();
        String* first = cs->first();
        int first_length = first->length();
        WriteToFlat(first, dest, 0, first_length);
        String* second = cs->second();
        WriteToFlat(second,
                    dest + first_length,
                    0,
                    len - first_length);
      }
      // Short-circuit the cons cell so later reads see the flat string.
      cs->set_first(result);
      cs->set_second(heap->empty_string(), SKIP_WRITE_BARRIER);
      return result;
    }
    default:
      return this;
  }
}
1157
1158
// Morphs this (two-byte) string in place into an external string backed
// by |resource|, by swapping its map and reinitializing its fields.
// Returns false when the object is too small to hold even a short
// external string header.
bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
  // Externalizing twice leaks the external resource, so it's
  // prohibited by the API.
  ASSERT(!this->IsExternalString());
#ifdef DEBUG
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    ASSERT(static_cast<size_t>(this->length()) == resource->length());
    ScopedVector<uc16> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    ASSERT(memcmp(smart_chars.start(),
                  resource->data(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  }
#endif  // DEBUG
  Heap* heap = GetHeap();
  int size = this->Size();  // Byte size of the original string.
  // Objects smaller than a short external string cannot be morphed.
  if (size < ExternalString::kShortSize) {
    return false;
  }
  bool is_ascii = this->IsOneByteRepresentation();
  bool is_internalized = this->IsInternalizedString();

  // Morph the object to an external string by adjusting the map and
  // reinitializing the fields.
  if (size >= ExternalString::kSize) {
    this->set_map_no_write_barrier(
        is_internalized
            ? (is_ascii
                   ? heap->external_internalized_string_with_one_byte_data_map()
                   : heap->external_internalized_string_map())
            : (is_ascii
                   ? heap->external_string_with_one_byte_data_map()
                   : heap->external_string_map()));
  } else {
    // Not enough room for a full external string: use the short variant.
    this->set_map_no_write_barrier(
        is_internalized
          ? (is_ascii
               ? heap->
                   short_external_internalized_string_with_one_byte_data_map()
               : heap->short_external_internalized_string_map())
          : (is_ascii
                 ? heap->short_external_string_with_one_byte_data_map()
                 : heap->short_external_string_map()));
  }
  ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.

  // Fill the remainder of the string with dead wood.
  int new_size = this->Size();  // Byte size of the external String object.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
  if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
    // new_size <= size, so this records a decrease in live bytes.
    MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
                                               new_size - size);
  }
  return true;
}
1217
1218
// ASCII variant of MakeExternal above: morphs this string in place into
// an external ASCII string backed by |resource|.  Returns false when the
// object is too small to hold even a short external string header.
bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
#ifdef DEBUG
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    ASSERT(static_cast<size_t>(this->length()) == resource->length());
    if (this->IsTwoByteRepresentation()) {
      // A two-byte string may only be externalized to an ASCII resource
      // if every character actually fits in one byte.
      ScopedVector<uint16_t> smart_chars(this->length());
      String::WriteToFlat(this, smart_chars.start(), 0, this->length());
      ASSERT(String::IsOneByte(smart_chars.start(), this->length()));
    }
    ScopedVector<char> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    ASSERT(memcmp(smart_chars.start(),
                  resource->data(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  }
#endif  // DEBUG
  Heap* heap = GetHeap();
  int size = this->Size();  // Byte size of the original string.
  // Objects smaller than a short external string cannot be morphed.
  if (size < ExternalString::kShortSize) {
    return false;
  }
  bool is_internalized = this->IsInternalizedString();

  // Morph the object to an external string by adjusting the map and
  // reinitializing the fields.  Use short version if space is limited.
  if (size >= ExternalString::kSize) {
    this->set_map_no_write_barrier(
        is_internalized ? heap->external_ascii_internalized_string_map()
                        : heap->external_ascii_string_map());
  } else {
    this->set_map_no_write_barrier(
        is_internalized ? heap->short_external_ascii_internalized_string_map()
                        : heap->short_external_ascii_string_map());
  }
  ExternalAsciiString* self = ExternalAsciiString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.

  // Fill the remainder of the string with dead wood.
  int new_size = this->Size();  // Byte size of the external String object.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
  if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
    // new_size <= size, so this records a decrease in live bytes.
    MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
                                               new_size - size);
  }
  return true;
}
1267
1268
1269void String::StringShortPrint(StringStream* accumulator) {
1270  int len = length();
1271  if (len > kMaxShortPrintLength) {
1272    accumulator->Add("<Very long string[%u]>", len);
1273    return;
1274  }
1275
1276  if (!LooksValid()) {
1277    accumulator->Add("<Invalid String>");
1278    return;
1279  }
1280
1281  ConsStringIteratorOp op;
1282  StringCharacterStream stream(this, &op);
1283
1284  bool truncated = false;
1285  if (len > kMaxShortPrintLength) {
1286    len = kMaxShortPrintLength;
1287    truncated = true;
1288  }
1289  bool ascii = true;
1290  for (int i = 0; i < len; i++) {
1291    uint16_t c = stream.GetNext();
1292
1293    if (c < 32 || c >= 127) {
1294      ascii = false;
1295    }
1296  }
1297  stream.Reset(this);
1298  if (ascii) {
1299    accumulator->Add("<String[%u]: ", length());
1300    for (int i = 0; i < len; i++) {
1301      accumulator->Put(static_cast<char>(stream.GetNext()));
1302    }
1303    accumulator->Put('>');
1304  } else {
1305    // Backslash indicates that the string contains control
1306    // characters and that backslashes are therefore escaped.
1307    accumulator->Add("<String[%u]\\: ", length());
1308    for (int i = 0; i < len; i++) {
1309      uint16_t c = stream.GetNext();
1310      if (c == '\n') {
1311        accumulator->Add("\\n");
1312      } else if (c == '\r') {
1313        accumulator->Add("\\r");
1314      } else if (c == '\\') {
1315        accumulator->Add("\\\\");
1316      } else if (c < 32 || c > 126) {
1317        accumulator->Add("\\x%02x", c);
1318      } else {
1319        accumulator->Put(static_cast<char>(c));
1320      }
1321    }
1322    if (truncated) {
1323      accumulator->Put('.');
1324      accumulator->Put('.');
1325      accumulator->Put('.');
1326    }
1327    accumulator->Put('>');
1328  }
1329  return;
1330}
1331
1332
// Appends a one-line description of this JSObject ("<JS Array[3]>",
// "<JS Function foo ...>", "<an Apple with map ...>", ...) to the
// accumulator.  Defensive against corrupted heaps: invalid constructor
// or shared-function pointers are reported rather than dereferenced.
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
  switch (map()->instance_type()) {
    case JS_ARRAY_TYPE: {
      // An undefined length can occur for partially initialized arrays.
      double length = JSArray::cast(this)->length()->IsUndefined()
          ? 0
          : JSArray::cast(this)->length()->Number();
      accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
      break;
    }
    case JS_WEAK_MAP_TYPE: {
      accumulator->Add("<JS WeakMap>");
      break;
    }
    case JS_WEAK_SET_TYPE: {
      accumulator->Add("<JS WeakSet>");
      break;
    }
    case JS_REGEXP_TYPE: {
      accumulator->Add("<JS RegExp>");
      break;
    }
    case JS_FUNCTION_TYPE: {
      JSFunction* function = JSFunction::cast(this);
      Object* fun_name = function->shared()->DebugName();
      bool printed = false;
      if (fun_name->IsString()) {
        String* str = String::cast(fun_name);
        if (str->length() > 0) {
          accumulator->Add("<JS Function ");
          accumulator->Put(str);
          printed = true;
        }
      }
      if (!printed) {
        // Anonymous or name unavailable.
        accumulator->Add("<JS Function");
      }
      accumulator->Add(" (SharedFunctionInfo %p)",
                       reinterpret_cast<void*>(function->shared()));
      accumulator->Put('>');
      break;
    }
    case JS_GENERATOR_OBJECT_TYPE: {
      accumulator->Add("<JS Generator>");
      break;
    }
    case JS_MODULE_TYPE: {
      accumulator->Add("<JS Module>");
      break;
    }
    // All other JSObjects are rather similar to each other (JSObject,
    // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
    default: {
      Map* map_of_this = map();
      Heap* heap = GetHeap();
      Object* constructor = map_of_this->constructor();
      bool printed = false;
      if (constructor->IsHeapObject() &&
          !heap->Contains(HeapObject::cast(constructor))) {
        // Constructor pointer does not point into this heap: corrupt.
        accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
      } else {
        // NOTE(review): named global_object but tests the global *proxy*.
        bool global_object = IsJSGlobalProxy();
        if (constructor->IsJSFunction()) {
          if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
            accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
          } else {
            Object* constructor_name =
                JSFunction::cast(constructor)->shared()->name();
            if (constructor_name->IsString()) {
              String* str = String::cast(constructor_name);
              if (str->length() > 0) {
                // Prefix with "a"/"an" depending on pronunciation.
                bool vowel = AnWord(str);
                accumulator->Add("<%sa%s ",
                       global_object ? "Global Object: " : "",
                       vowel ? "n" : "");
                accumulator->Put(str);
                accumulator->Add(" with %smap %p",
                    map_of_this->is_deprecated() ? "deprecated " : "",
                    map_of_this);
                printed = true;
              }
            }
          }
        }
        if (!printed) {
          accumulator->Add("<JS %sObject", global_object ? "Global " : "");
        }
      }
      if (IsJSValue()) {
        // Wrapper objects also show the primitive they wrap.
        accumulator->Add(" value = ");
        JSValue::cast(this)->value()->ShortPrint(accumulator);
      }
      accumulator->Put('>');
      break;
    }
  }
}
1429
1430
1431void JSObject::PrintElementsTransition(
1432    FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements,
1433    ElementsKind to_kind, FixedArrayBase* to_elements) {
1434  if (from_kind != to_kind) {
1435    PrintF(file, "elements transition [");
1436    PrintElementsKind(file, from_kind);
1437    PrintF(file, " -> ");
1438    PrintElementsKind(file, to_kind);
1439    PrintF(file, "] in ");
1440    JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
1441    PrintF(file, " for ");
1442    ShortPrint(file);
1443    PrintF(file, " from ");
1444    from_elements->ShortPrint(file);
1445    PrintF(file, " to ");
1446    to_elements->ShortPrint(file);
1447    PrintF(file, "\n");
1448  }
1449}
1450
1451
// Appends a one-line description of any heap object to the accumulator.
// Defensive against corrupted heaps: an object or map pointer outside
// the heap is reported rather than dereferenced.  Strings and JSObjects
// delegate to their specialized printers; everything else is handled by
// instance type.
void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
  Heap* heap = GetHeap();
  if (!heap->Contains(this)) {
    accumulator->Add("!!!INVALID POINTER!!!");
    return;
  }
  if (!heap->Contains(map())) {
    accumulator->Add("!!!INVALID MAP!!!");
    return;
  }

  // Every description starts with the object's address.
  accumulator->Add("%p ", this);

  if (IsString()) {
    String::cast(this)->StringShortPrint(accumulator);
    return;
  }
  if (IsJSObject()) {
    JSObject::cast(this)->JSObjectShortPrint(accumulator);
    return;
  }
  switch (map()->instance_type()) {
    case MAP_TYPE:
      accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind());
      break;
    case FIXED_ARRAY_TYPE:
      accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
      break;
    case FIXED_DOUBLE_ARRAY_TYPE:
      accumulator->Add("<FixedDoubleArray[%u]>",
                       FixedDoubleArray::cast(this)->length());
      break;
    case BYTE_ARRAY_TYPE:
      accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
      break;
    case FREE_SPACE_TYPE:
      accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size());
      break;
    case EXTERNAL_PIXEL_ARRAY_TYPE:
      accumulator->Add("<ExternalPixelArray[%u]>",
                       ExternalPixelArray::cast(this)->length());
      break;
    case EXTERNAL_BYTE_ARRAY_TYPE:
      accumulator->Add("<ExternalByteArray[%u]>",
                       ExternalByteArray::cast(this)->length());
      break;
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
      accumulator->Add("<ExternalUnsignedByteArray[%u]>",
                       ExternalUnsignedByteArray::cast(this)->length());
      break;
    case EXTERNAL_SHORT_ARRAY_TYPE:
      accumulator->Add("<ExternalShortArray[%u]>",
                       ExternalShortArray::cast(this)->length());
      break;
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
      accumulator->Add("<ExternalUnsignedShortArray[%u]>",
                       ExternalUnsignedShortArray::cast(this)->length());
      break;
    case EXTERNAL_INT_ARRAY_TYPE:
      accumulator->Add("<ExternalIntArray[%u]>",
                       ExternalIntArray::cast(this)->length());
      break;
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
      accumulator->Add("<ExternalUnsignedIntArray[%u]>",
                       ExternalUnsignedIntArray::cast(this)->length());
      break;
    case EXTERNAL_FLOAT_ARRAY_TYPE:
      accumulator->Add("<ExternalFloatArray[%u]>",
                       ExternalFloatArray::cast(this)->length());
      break;
    case EXTERNAL_DOUBLE_ARRAY_TYPE:
      accumulator->Add("<ExternalDoubleArray[%u]>",
                       ExternalDoubleArray::cast(this)->length());
      break;
    case SHARED_FUNCTION_INFO_TYPE: {
      // Include the function's debug name when it has one.
      SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
      SmartArrayPointer<char> debug_name =
          shared->DebugName()->ToCString();
      if (debug_name[0] != 0) {
        accumulator->Add("<SharedFunctionInfo %s>", *debug_name);
      } else {
        accumulator->Add("<SharedFunctionInfo>");
      }
      break;
    }
    case JS_MESSAGE_OBJECT_TYPE:
      accumulator->Add("<JSMessageObject>");
      break;
// Expand one "<Name>" case per struct type from STRUCT_LIST.
#define MAKE_STRUCT_CASE(NAME, Name, name) \
  case NAME##_TYPE:                        \
    accumulator->Put('<');                 \
    accumulator->Add(#Name);               \
    accumulator->Put('>');                 \
    break;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    case CODE_TYPE:
      accumulator->Add("<Code>");
      break;
    case ODDBALL_TYPE: {
      // Identify the specific oddball singleton.
      if (IsUndefined())
        accumulator->Add("<undefined>");
      else if (IsTheHole())
        accumulator->Add("<the hole>");
      else if (IsNull())
        accumulator->Add("<null>");
      else if (IsTrue())
        accumulator->Add("<true>");
      else if (IsFalse())
        accumulator->Add("<false>");
      else
        accumulator->Add("<Odd Oddball>");
      break;
    }
    case SYMBOL_TYPE: {
      Symbol* symbol = Symbol::cast(this);
      accumulator->Add("<Symbol: %d", symbol->Hash());
      if (!symbol->name()->IsUndefined()) {
        accumulator->Add(" ");
        String::cast(symbol->name())->StringShortPrint(accumulator);
      }
      accumulator->Add(">");
      break;
    }
    case HEAP_NUMBER_TYPE:
      accumulator->Add("<Number: ");
      HeapNumber::cast(this)->HeapNumberPrint(accumulator);
      accumulator->Put('>');
      break;
    case JS_PROXY_TYPE:
      accumulator->Add("<JSProxy>");
      break;
    case JS_FUNCTION_PROXY_TYPE:
      accumulator->Add("<JSFunctionProxy>");
      break;
    case FOREIGN_TYPE:
      accumulator->Add("<Foreign>");
      break;
    case CELL_TYPE:
      accumulator->Add("Cell for ");
      Cell::cast(this)->value()->ShortPrint(accumulator);
      break;
    case PROPERTY_CELL_TYPE:
      accumulator->Add("PropertyCell for ");
      PropertyCell::cast(this)->value()->ShortPrint(accumulator);
      break;
    default:
      accumulator->Add("<Other heap object (%d)>", map()->instance_type());
      break;
  }
}
1603
1604
1605void HeapObject::Iterate(ObjectVisitor* v) {
1606  // Handle header
1607  IteratePointer(v, kMapOffset);
1608  // Handle object body
1609  Map* m = map();
1610  IterateBody(m->instance_type(), SizeFromMap(m), v);
1611}
1612
1613
// Visits the pointer fields in this object's body according to |type|.
// Strings are dispatched on their representation/encoding bits; other
// objects on the instance type.  Must not rely on map() being readable.
void HeapObject::IterateBody(InstanceType type, int object_size,
                             ObjectVisitor* v) {
  // Avoiding <Type>::cast(this) because it accesses the map pointer field.
  // During GC, the map pointer field is encoded.
  if (type < FIRST_NONSTRING_TYPE) {
    switch (type & kStringRepresentationMask) {
      case kSeqStringTag:
        // Sequential strings contain no pointers beyond the header.
        break;
      case kConsStringTag:
        ConsString::BodyDescriptor::IterateBody(this, v);
        break;
      case kSlicedStringTag:
        SlicedString::BodyDescriptor::IterateBody(this, v);
        break;
      case kExternalStringTag:
        if ((type & kStringEncodingMask) == kOneByteStringTag) {
          reinterpret_cast<ExternalAsciiString*>(this)->
              ExternalAsciiStringIterateBody(v);
        } else {
          reinterpret_cast<ExternalTwoByteString*>(this)->
              ExternalTwoByteStringIterateBody(v);
        }
        break;
    }
    return;
  }

  switch (type) {
    case FIXED_ARRAY_TYPE:
      FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
      break;
    case FIXED_DOUBLE_ARRAY_TYPE:
      // Raw doubles only; nothing to visit.
      break;
    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_ARRAY_BUFFER_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
    case JS_REGEXP_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
      JSObject::BodyDescriptor::IterateBody(this, object_size, v);
      break;
    case JS_FUNCTION_TYPE:
      reinterpret_cast<JSFunction*>(this)
          ->JSFunctionIterateBody(object_size, v);
      break;
    case ODDBALL_TYPE:
      Oddball::BodyDescriptor::IterateBody(this, v);
      break;
    case JS_PROXY_TYPE:
      JSProxy::BodyDescriptor::IterateBody(this, v);
      break;
    case JS_FUNCTION_PROXY_TYPE:
      JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
      break;
    case FOREIGN_TYPE:
      reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
      break;
    case MAP_TYPE:
      Map::BodyDescriptor::IterateBody(this, v);
      break;
    case CODE_TYPE:
      reinterpret_cast<Code*>(this)->CodeIterateBody(v);
      break;
    case CELL_TYPE:
      Cell::BodyDescriptor::IterateBody(this, v);
      break;
    case PROPERTY_CELL_TYPE:
      PropertyCell::BodyDescriptor::IterateBody(this, v);
      break;
    case SYMBOL_TYPE:
      Symbol::BodyDescriptor::IterateBody(this, v);
      break;
    // Pointer-free object bodies: nothing to visit.
    case HEAP_NUMBER_TYPE:
    case FILLER_TYPE:
    case BYTE_ARRAY_TYPE:
    case FREE_SPACE_TYPE:
    case EXTERNAL_PIXEL_ARRAY_TYPE:
    case EXTERNAL_BYTE_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
    case EXTERNAL_SHORT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
    case EXTERNAL_INT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
    case EXTERNAL_FLOAT_ARRAY_TYPE:
    case EXTERNAL_DOUBLE_ARRAY_TYPE:
      break;
    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
      break;
    }

// All struct types share one handler, except allocation sites which
// have their own body descriptor (weak fields).
#define MAKE_STRUCT_CASE(NAME, Name, name) \
        case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (type == ALLOCATION_SITE_TYPE) {
        AllocationSite::BodyDescriptor::IterateBody(this, v);
      } else {
        StructBodyDescriptor::IterateBody(this, object_size, v);
      }
      break;
    default:
      PrintF("Unknown type: %d\n", type);
      UNREACHABLE();
  }
}
1733
1734
1735bool HeapNumber::HeapNumberBooleanValue() {
1736  // NaN, +0, and -0 should return the false object
1737#if __BYTE_ORDER == __LITTLE_ENDIAN
1738  union IeeeDoubleLittleEndianArchType u;
1739#elif __BYTE_ORDER == __BIG_ENDIAN
1740  union IeeeDoubleBigEndianArchType u;
1741#endif
1742  u.d = value();
1743  if (u.bits.exp == 2047) {
1744    // Detect NaN for IEEE double precision floating point.
1745    if ((u.bits.man_low | u.bits.man_high) != 0) return false;
1746  }
1747  if (u.bits.exp == 0) {
1748    // Detect +0, and -0 for IEEE double precision floating point.
1749    if ((u.bits.man_low | u.bits.man_high) == 0) return false;
1750  }
1751  return true;
1752}
1753
1754
1755void HeapNumber::HeapNumberPrint(FILE* out) {
1756  PrintF(out, "%.16g", Number());
1757}
1758
1759
1760void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
1761  // The Windows version of vsnprintf can allocate when printing a %g string
1762  // into a buffer that may not be big enough.  We don't want random memory
1763  // allocation when producing post-crash stack traces, so we print into a
1764  // buffer that is plenty big enough for any floating point number, then
1765  // print that using vsnprintf (which may truncate but never allocate if
1766  // there is no more space in the buffer).
1767  EmbeddedVector<char, 100> buffer;
1768  OS::SNPrintF(buffer, "%.16g", Number());
1769  accumulator->Add("%s", buffer.start());
1770}
1771
1772
1773String* JSReceiver::class_name() {
1774  if (IsJSFunction() && IsJSFunctionProxy()) {
1775    return GetHeap()->function_class_string();
1776  }
1777  if (map()->constructor()->IsJSFunction()) {
1778    JSFunction* constructor = JSFunction::cast(map()->constructor());
1779    return String::cast(constructor->shared()->instance_class_name());
1780  }
1781  // If the constructor is not present, return "Object".
1782  return GetHeap()->Object_string();
1783}
1784
1785
1786String* JSReceiver::constructor_name() {
1787  if (map()->constructor()->IsJSFunction()) {
1788    JSFunction* constructor = JSFunction::cast(map()->constructor());
1789    String* name = String::cast(constructor->shared()->name());
1790    if (name->length() > 0) return name;
1791    String* inferred_name = constructor->shared()->inferred_name();
1792    if (inferred_name->length() > 0) return inferred_name;
1793    Object* proto = GetPrototype();
1794    if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
1795  }
1796  // TODO(rossberg): what about proxies?
1797  // If the constructor is not present, return "Object".
1798  return GetHeap()->Object_string();
1799}
1800
1801
MaybeObject* JSObject::AddFastPropertyUsingMap(Map* new_map,
                                               Name* name,
                                               Object* value,
                                               int field_index,
                                               Representation representation) {
  // Installs the already-computed |new_map| on this object and stores
  // |value| in the field at |field_index|. Returns |value| on success, or a
  // failure if any allocation fails. All fallible allocations happen before
  // any object state is mutated.
  // This method is used to transition to a field. If we are transitioning to a
  // double field, allocate new storage.
  Object* storage;
  MaybeObject* maybe_storage =
      value->AllocateNewStorageFor(GetHeap(), representation);
  if (!maybe_storage->To(&storage)) return maybe_storage;

  // If the current map has no spare property slots, grow the out-of-object
  // properties array to the size the new map expects.
  if (map()->unused_property_fields() == 0) {
    int new_unused = new_map->unused_property_fields();
    FixedArray* values;
    MaybeObject* maybe_values =
        properties()->CopySize(properties()->length() + new_unused + 1);
    if (!maybe_values->To(&values)) return maybe_values;

    set_properties(values);
  }

  // All allocations succeeded; commit the new map and the field value.
  set_map(new_map);

  FastPropertyAtPut(field_index, storage);
  return value;
}
1829
1830
1831static bool IsIdentifier(UnicodeCache* cache, Name* name) {
1832  // Checks whether the buffer contains an identifier (no escape).
1833  if (!name->IsString()) return false;
1834  String* string = String::cast(name);
1835  if (string->length() == 0) return false;
1836  ConsStringIteratorOp op;
1837  StringCharacterStream stream(string, &op);
1838  if (!cache->IsIdentifierStart(stream.GetNext())) {
1839    return false;
1840  }
1841  while (stream.HasMore()) {
1842    if (!cache->IsIdentifierPart(stream.GetNext())) {
1843      return false;
1844    }
1845  }
1846  return true;
1847}
1848
1849
MaybeObject* JSObject::AddFastProperty(Name* name,
                                       Object* value,
                                       PropertyAttributes attributes,
                                       StoreFromKeyed store_mode,
                                       ValueType value_type) {
  // Adds a new property to a fast-mode object as a field, growing the
  // out-of-object properties array if needed. May instead normalize the
  // object and add the property in slow (dictionary) mode. Returns |value|
  // on success or a failure on allocation error.
  ASSERT(!IsJSGlobalProxy());
  ASSERT(DescriptorArray::kNotFound ==
         map()->instance_descriptors()->Search(
             name, map()->NumberOfOwnDescriptors()));

  // Normalize the object if the name is an actual name (not the
  // hidden string) and is not a real identifier.
  // Normalize the object if it will have too many fast properties.
  Isolate* isolate = GetHeap()->isolate();
  if ((!name->IsSymbol() && !IsIdentifier(isolate->unicode_cache(), name)
       && name != isolate->heap()->hidden_string()) ||
      (map()->unused_property_fields() == 0 &&
       TooManyFastProperties(properties()->length(), store_mode))) {
    Object* obj;
    MaybeObject* maybe_obj =
        NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

    return AddSlowProperty(name, value, attributes);
  }

  // Compute the new index for new field.
  int index = map()->NextFreePropertyIndex();

  // Allocate new instance descriptors with (name, index) added.
  // Context extension objects get the most general representation.
  if (IsJSContextExtensionObject()) value_type = FORCE_TAGGED;
  Representation representation = value->OptimalRepresentation(value_type);

  FieldDescriptor new_field(name, index, attributes, representation);

  ASSERT(index < map()->inobject_properties() ||
         (index - map()->inobject_properties()) < properties()->length() ||
         map()->unused_property_fields() == 0);

  FixedArray* values = NULL;

  // TODO(verwaest): Merge with AddFastPropertyUsingMap.
  if (map()->unused_property_fields() == 0) {
    // Make room for the new value.
    MaybeObject* maybe_values =
        properties()->CopySize(properties()->length() + kFieldsAdded);
    if (!maybe_values->To(&values)) return maybe_values;
  }

  TransitionFlag flag = INSERT_TRANSITION;

  Heap* heap = isolate->heap();

  // Representation-specific storage (e.g. for double fields) must be
  // allocated before the map is committed below.
  Object* storage;
  MaybeObject* maybe_storage =
      value->AllocateNewStorageFor(heap, representation);
  if (!maybe_storage->To(&storage)) return maybe_storage;

  // Note that Map::CopyAddDescriptor has side-effects, the new map is already
  // inserted in the transition tree. No more allocations that might fail are
  // allowed after this point.
  Map* new_map;
  MaybeObject* maybe_new_map = map()->CopyAddDescriptor(&new_field, flag);
  if (!maybe_new_map->To(&new_map)) return maybe_new_map;

  if (map()->unused_property_fields() == 0) {
    ASSERT(values != NULL);
    set_properties(values);
    new_map->set_unused_property_fields(kFieldsAdded - 1);
  } else {
    new_map->set_unused_property_fields(map()->unused_property_fields() - 1);
  }

  set_map(new_map);

  FastPropertyAtPut(index, storage);
  return value;
}
1928
1929
1930MaybeObject* JSObject::AddConstantProperty(
1931    Name* name,
1932    Object* constant,
1933    PropertyAttributes attributes) {
1934  // Allocate new instance descriptors with (name, constant) added
1935  ConstantDescriptor d(name, constant, attributes);
1936
1937  TransitionFlag flag =
1938      // Do not add transitions to  global objects.
1939      (IsGlobalObject() ||
1940      // Don't add transitions to special properties with non-trivial
1941      // attributes.
1942       attributes != NONE)
1943      ? OMIT_TRANSITION
1944      : INSERT_TRANSITION;
1945
1946  Map* new_map;
1947  MaybeObject* maybe_new_map = map()->CopyAddDescriptor(&d, flag);
1948  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
1949
1950  set_map(new_map);
1951  return constant;
1952}
1953
1954
// Add property in slow mode
MaybeObject* JSObject::AddSlowProperty(Name* name,
                                       Object* value,
                                       PropertyAttributes attributes) {
  // Adds |name| -> |value| to the property dictionary. For global objects
  // the value is boxed in a PropertyCell. Returns |value| on success or a
  // failure on allocation error.
  ASSERT(!HasFastProperties());
  NameDictionary* dict = property_dictionary();
  Object* store_value = value;
  if (IsGlobalObject()) {
    // In case name is an orphaned property reuse the cell.
    int entry = dict->FindEntry(name);
    if (entry != NameDictionary::kNotFound) {
      store_value = dict->ValueAt(entry);
      MaybeObject* maybe_type =
          PropertyCell::cast(store_value)->SetValueInferType(value);
      if (maybe_type->IsFailure()) return maybe_type;
      // Assign an enumeration index to the property and update
      // SetNextEnumerationIndex.
      int index = dict->NextEnumerationIndex();
      PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
      dict->SetNextEnumerationIndex(index + 1);
      dict->SetEntry(entry, name, store_value, details);
      return value;
    }
    // No reusable cell found: allocate a fresh PropertyCell for the value.
    Heap* heap = GetHeap();
    { MaybeObject* maybe_store_value =
          heap->AllocatePropertyCell(value);
      if (!maybe_store_value->ToObject(&store_value)) return maybe_store_value;
    }
    MaybeObject* maybe_type =
        PropertyCell::cast(store_value)->SetValueInferType(value);
    if (maybe_type->IsFailure()) return maybe_type;
  }
  // Enumeration index 0 lets the dictionary assign the next available one.
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Object* result;
  { MaybeObject* maybe_result = dict->Add(name, store_value, details);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  // Adding may have reallocated the dictionary; install the new one.
  if (dict != result) set_properties(NameDictionary::cast(result));
  return value;
}
1995
1996
MaybeObject* JSObject::AddProperty(Name* name,
                                   Object* value,
                                   PropertyAttributes attributes,
                                   StrictModeFlag strict_mode,
                                   JSReceiver::StoreFromKeyed store_mode,
                                   ExtensibilityCheck extensibility_check,
                                   ValueType value_type,
                                   StoreMode mode) {
  // Adds a property that does not yet exist on this object. Dispatches to
  // constant, fast-field or slow (dictionary) addition, enforces the
  // extensibility check, and enqueues an Object.observe "new" change record
  // when observation is enabled. Returns the stored value or a failure.
  ASSERT(!IsJSGlobalProxy());
  Map* map_of_this = map();
  Heap* heap = GetHeap();
  Isolate* isolate = heap->isolate();
  MaybeObject* result;
  if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
      !map_of_this->is_extensible()) {
    // Non-extensible object: silently ignore the store in non-strict mode,
    // throw a TypeError in strict mode.
    if (strict_mode == kNonStrictMode) {
      return value;
    } else {
      Handle<Object> args[1] = {Handle<Name>(name)};
      return isolate->Throw(
          *isolate->factory()->NewTypeError("object_not_extensible",
                                            HandleVector(args, 1)));
    }
  }

  if (HasFastProperties()) {
    // Ensure the descriptor array does not get too big.
    if (map_of_this->NumberOfOwnDescriptors() <
        DescriptorArray::kMaxNumberOfDescriptors) {
      // TODO(verwaest): Support other constants.
      // if (mode == ALLOW_AS_CONSTANT &&
      //     !value->IsTheHole() &&
      //     !value->IsConsString()) {
      if (value->IsJSFunction()) {
        result = AddConstantProperty(name, value, attributes);
      } else {
        result = AddFastProperty(
            name, value, attributes, store_mode, value_type);
      }
    } else {
      // Normalize the object to prevent very large instance descriptors.
      // This eliminates unwanted N^2 allocation and lookup behavior.
      Object* obj;
      MaybeObject* maybe = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
      if (!maybe->To(&obj)) return maybe;
      result = AddSlowProperty(name, value, attributes);
    }
  } else {
    result = AddSlowProperty(name, value, attributes);
  }

  Handle<Object> hresult;
  if (!result->ToHandle(&hresult, isolate)) return result;

  // Report a "new" change record; the old value is the hole because the
  // property did not exist before.
  if (FLAG_harmony_observation && map()->is_observed()) {
    EnqueueChangeRecord(handle(this, isolate),
                        "new",
                        handle(name, isolate),
                        handle(heap->the_hole_value(), isolate));
  }

  return *hresult;
}
2060
2061
2062void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
2063                                   const char* type_str,
2064                                   Handle<Name> name,
2065                                   Handle<Object> old_value) {
2066  Isolate* isolate = object->GetIsolate();
2067  HandleScope scope(isolate);
2068  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
2069  if (object->IsJSGlobalObject()) {
2070    object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
2071  }
2072  Handle<Object> args[] = { type, object, name, old_value };
2073  bool threw;
2074  Execution::Call(Handle<JSFunction>(isolate->observers_notify_change()),
2075                  isolate->factory()->undefined_value(),
2076                  old_value->IsTheHole() ? 3 : 4, args,
2077                  &threw);
2078  ASSERT(!threw);
2079}
2080
2081
2082void JSObject::DeliverChangeRecords(Isolate* isolate) {
2083  ASSERT(isolate->observer_delivery_pending());
2084  bool threw = false;
2085  Execution::Call(
2086      isolate->observers_deliver_changes(),
2087      isolate->factory()->undefined_value(),
2088      0,
2089      NULL,
2090      &threw);
2091  ASSERT(!threw);
2092  isolate->set_observer_delivery_pending(false);
2093}
2094
2095
2096MaybeObject* JSObject::SetPropertyPostInterceptor(
2097    Name* name,
2098    Object* value,
2099    PropertyAttributes attributes,
2100    StrictModeFlag strict_mode,
2101    ExtensibilityCheck extensibility_check,
2102    StoreMode mode) {
2103  // Check local property, ignore interceptor.
2104  LookupResult result(GetIsolate());
2105  LocalLookupRealNamedProperty(name, &result);
2106  if (!result.IsFound()) map()->LookupTransition(this, name, &result);
2107  if (result.IsFound()) {
2108    // An existing property or a map transition was found. Use set property to
2109    // handle all these cases.
2110    return SetProperty(&result, name, value, attributes, strict_mode);
2111  }
2112  bool done = false;
2113  MaybeObject* result_object;
2114  result_object =
2115      SetPropertyViaPrototypes(name, value, attributes, strict_mode, &done);
2116  if (done) return result_object;
2117  // Add a new real property.
2118  return AddProperty(name, value, attributes, strict_mode,
2119                     MAY_BE_STORE_FROM_KEYED, extensibility_check,
2120                     OPTIMAL_REPRESENTATION, mode);
2121}
2122
2123
2124MaybeObject* JSObject::ReplaceSlowProperty(Name* name,
2125                                           Object* value,
2126                                           PropertyAttributes attributes) {
2127  NameDictionary* dictionary = property_dictionary();
2128  int old_index = dictionary->FindEntry(name);
2129  int new_enumeration_index = 0;  // 0 means "Use the next available index."
2130  if (old_index != -1) {
2131    // All calls to ReplaceSlowProperty have had all transitions removed.
2132    new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
2133  }
2134
2135  PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
2136  return SetNormalizedProperty(name, value, new_details);
2137}
2138
2139
MaybeObject* JSObject::ConvertTransitionToMapTransition(
    int transition_index,
    Name* name,
    Object* new_value,
    PropertyAttributes attributes) {
  // Converts the descriptor for |name| to a field and re-links the old
  // map's transition at |transition_index| to point at the resulting map,
  // deprecating the previous transition target's subtree.
  Map* old_map = map();
  Map* old_target = old_map->GetTransition(transition_index);
  Object* result;

  MaybeObject* maybe_result = ConvertDescriptorToField(
      name, new_value, attributes, OMIT_TRANSITION_KEEP_REPRESENTATIONS);
  if (!maybe_result->To(&result)) return maybe_result;

  // The conversion may have normalized the object; then there is no fast
  // map to re-link.
  if (!HasFastProperties()) return result;

  // This method should only be used to convert existing transitions.
  Map* new_map = map();

  // TODO(verwaest): From here on we lose existing map transitions, causing
  // invalid back pointers. This will change once we can store multiple
  // transitions with the same key.
  bool owned_descriptors = old_map->owns_descriptors();
  if (owned_descriptors ||
      old_target->instance_descriptors() == old_map->instance_descriptors()) {
    // Since the conversion above generated a new fast map with an additional
    // property which can be shared as well, install this descriptor pointer
    // along the entire chain of smaller maps.
    Map* map;
    DescriptorArray* new_descriptors = new_map->instance_descriptors();
    DescriptorArray* old_descriptors = old_map->instance_descriptors();
    // Walk back pointers while the maps still share the old descriptors.
    for (Object* current = old_map;
         !current->IsUndefined();
         current = map->GetBackPointer()) {
      map = Map::cast(current);
      if (map->instance_descriptors() != old_descriptors) break;
      map->SetEnumLength(Map::kInvalidEnumCache);
      map->set_instance_descriptors(new_descriptors);
    }
    old_map->set_owns_descriptors(false);
  }

  old_target->DeprecateTransitionTree();

  old_map->SetTransition(transition_index, new_map);
  new_map->SetBackPointer(old_map);
  return result;
}
2187
2188
2189MaybeObject* JSObject::ConvertDescriptorToField(Name* name,
2190                                                Object* new_value,
2191                                                PropertyAttributes attributes,
2192                                                TransitionFlag flag) {
2193  if (map()->unused_property_fields() == 0 &&
2194      TooManyFastProperties(properties()->length(), MAY_BE_STORE_FROM_KEYED)) {
2195    Object* obj;
2196    MaybeObject* maybe_obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
2197    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2198    return ReplaceSlowProperty(name, new_value, attributes);
2199  }
2200
2201  Representation representation = IsJSContextExtensionObject()
2202      ? Representation::Tagged() : new_value->OptimalRepresentation();
2203  int index = map()->NextFreePropertyIndex();
2204  FieldDescriptor new_field(name, index, attributes, representation);
2205
2206  // Make a new map for the object.
2207  Map* new_map;
2208  MaybeObject* maybe_new_map = map()->CopyInsertDescriptor(&new_field, flag);
2209  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
2210
2211  // Make new properties array if necessary.
2212  FixedArray* new_properties = NULL;
2213  int new_unused_property_fields = map()->unused_property_fields() - 1;
2214  if (map()->unused_property_fields() == 0) {
2215    new_unused_property_fields = kFieldsAdded - 1;
2216    MaybeObject* maybe_new_properties =
2217        properties()->CopySize(properties()->length() + kFieldsAdded);
2218    if (!maybe_new_properties->To(&new_properties)) return maybe_new_properties;
2219  }
2220
2221  Heap* heap = GetHeap();
2222  Object* storage;
2223  MaybeObject* maybe_storage =
2224      new_value->AllocateNewStorageFor(heap, representation);
2225  if (!maybe_storage->To(&storage)) return maybe_storage;
2226
2227  // Update pointers to commit changes.
2228  // Object points to the new map.
2229  new_map->set_unused_property_fields(new_unused_property_fields);
2230  set_map(new_map);
2231  if (new_properties != NULL) {
2232    set_properties(new_properties);
2233  }
2234  FastPropertyAtPut(index, new_value);
2235  return new_value;
2236}
2237
2238
2239const char* Representation::Mnemonic() const {
2240  switch (kind_) {
2241    case kNone: return "v";
2242    case kTagged: return "t";
2243    case kSmi: return "s";
2244    case kDouble: return "d";
2245    case kInteger32: return "i";
2246    case kHeapObject: return "h";
2247    case kExternal: return "x";
2248    default:
2249      UNREACHABLE();
2250      return NULL;
2251  }
2252}
2253
2254
2255enum RightTrimMode { FROM_GC, FROM_MUTATOR };
2256
2257
2258static void ZapEndOfFixedArray(Address new_end, int to_trim) {
2259  // If we are doing a big trim in old space then we zap the space.
2260  Object** zap = reinterpret_cast<Object**>(new_end);
2261  zap++;  // Header of filler must be at least one word so skip that.
2262  for (int i = 1; i < to_trim; i++) {
2263    *zap++ = Smi::FromInt(0);
2264  }
2265}
2266
2267
template<RightTrimMode trim_mode>
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
  // Shrinks |elms| in place by |to_trim| elements, installing a filler
  // object over the freed tail. Not valid for COW arrays or arrays in
  // large-object space.
  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  ASSERT(!HEAP->lo_space()->Contains(elms));

  const int len = elms->length();

  ASSERT(to_trim < len);

  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);

  // Zap the trimmed region, except when the GC trims and zapping of
  // garbage is disabled.
  if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
    ZapEndOfFixedArray(new_end, to_trim);
  }

  int size_delta = to_trim * kPointerSize;

  // Technically in new space this write might be omitted (except for
  // debug mode which iterates through the heap), but to play safer
  // we still do it.
  heap->CreateFillerObjectAt(new_end, size_delta);

  elms->set_length(len - to_trim);

  // Maintain marking consistency for IncrementalMarking.
  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
    if (trim_mode == FROM_GC) {
      MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
    } else {
      MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
    }
  }
}
2302
2303
2304bool Map::InstancesNeedRewriting(Map* target,
2305                                 int target_number_of_fields,
2306                                 int target_inobject,
2307                                 int target_unused) {
2308  // If fields were added (or removed), rewrite the instance.
2309  int number_of_fields = NumberOfFields();
2310  ASSERT(target_number_of_fields >= number_of_fields);
2311  if (target_number_of_fields != number_of_fields) return true;
2312
2313  if (FLAG_track_double_fields) {
2314    // If smi descriptors were replaced by double descriptors, rewrite.
2315    DescriptorArray* old_desc = instance_descriptors();
2316    DescriptorArray* new_desc = target->instance_descriptors();
2317    int limit = NumberOfOwnDescriptors();
2318    for (int i = 0; i < limit; i++) {
2319      if (new_desc->GetDetails(i).representation().IsDouble() &&
2320          !old_desc->GetDetails(i).representation().IsDouble()) {
2321        return true;
2322      }
2323    }
2324  }
2325
2326  // If no fields were added, and no inobject properties were removed, setting
2327  // the map is sufficient.
2328  if (target_inobject == inobject_properties()) return false;
2329  // In-object slack tracking may have reduced the object size of the new map.
2330  // In that case, succeed if all existing fields were inobject, and they still
2331  // fit within the new inobject size.
2332  ASSERT(target_inobject < inobject_properties());
2333  if (target_number_of_fields <= target_inobject) {
2334    ASSERT(target_number_of_fields + target_unused == target_inobject);
2335    return false;
2336  }
2337  // Otherwise, properties will need to be moved to the backing store.
2338  return true;
2339}
2340
2341
2342// To migrate an instance to a map:
2343// - First check whether the instance needs to be rewritten. If not, simply
2344//   change the map.
2345// - Otherwise, allocate a fixed array large enough to hold all fields, in
2346//   addition to unused space.
2347// - Copy all existing properties in, in the following order: backing store
2348//   properties, unused fields, inobject properties.
2349// - If all allocation succeeded, commit the state atomically:
2350//   * Copy inobject properties from the backing store back into the object.
2351//   * Trim the difference in instance size of the object. This also cleanly
2352//     frees inobject properties that moved to the backing store.
//   * If there are properties left in the backing store, trim off the space
2354//     to temporarily store the inobject properties.
2355//   * If there are properties left in the backing store, install the backing
2356//     store.
MaybeObject* JSObject::MigrateToMap(Map* new_map) {
  // Migrates this instance to |new_map| following the algorithm described
  // in the comment above: all fallible allocations happen before any object
  // state is mutated, so the commit is effectively atomic.
  Heap* heap = GetHeap();
  Map* old_map = map();
  int number_of_fields = new_map->NumberOfFields();
  int inobject = new_map->inobject_properties();
  int unused = new_map->unused_property_fields();

  // Nothing to do if no functions were converted to fields.
  if (!old_map->InstancesNeedRewriting(
          new_map, number_of_fields, inobject, unused)) {
    set_map(new_map);
    return this;
  }

  // Temporary array holding ALL fields: backing-store fields at the front,
  // inobject fields at the end (see target_index computation below).
  int total_size = number_of_fields + unused;
  int external = total_size - inobject;
  FixedArray* array;
  MaybeObject* maybe_array = heap->AllocateFixedArray(total_size);
  if (!maybe_array->To(&array)) return maybe_array;

  DescriptorArray* old_descriptors = old_map->instance_descriptors();
  DescriptorArray* new_descriptors = new_map->instance_descriptors();
  int descriptors = new_map->NumberOfOwnDescriptors();

  for (int i = 0; i < descriptors; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    ASSERT(old_details.type() == CONSTANT ||
           old_details.type() == FIELD);
    Object* value = old_details.type() == CONSTANT
        ? old_descriptors->GetValue(i)
        : RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
    // A field becoming double-represented needs a fresh box for the value.
    if (FLAG_track_double_fields &&
        !old_details.representation().IsDouble() &&
        details.representation().IsDouble()) {
      if (old_details.representation().IsNone()) value = Smi::FromInt(0);
      // Objects must be allocated in the old object space, since the
      // overall number of HeapNumbers needed for the conversion might
      // exceed the capacity of new space, and we would fail repeatedly
      // trying to migrate the instance.
      MaybeObject* maybe_storage =
          value->AllocateNewStorageFor(heap, details.representation(), TENURED);
      if (!maybe_storage->To(&value)) return maybe_storage;
    }
    ASSERT(!(FLAG_track_double_fields &&
             details.representation().IsDouble() &&
             value->IsSmi()));
    // Map the field index into the temporary array layout: indices below
    // |inobject| wrap around to the end of the array.
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, value);
  }

  // From here on we cannot fail anymore.

  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
  // avoid overwriting |one_pointer_filler_map|.
  int limit = Min(inobject, number_of_fields);
  for (int i = 0; i < limit; i++) {
    FastPropertyAtPut(i, array->get(external + i));
  }

  // Create filler object past the new instance size.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = old_map->instance_size() - new_instance_size;
  ASSERT(instance_size_delta >= 0);
  Address address = this->address() + new_instance_size;
  heap->CreateFillerObjectAt(address, instance_size_delta);

  // If there are properties in the new backing store, trim it to the correct
  // size and install the backing store into the object.
  if (external > 0) {
    RightTrimFixedArray<FROM_MUTATOR>(heap, array, inobject);
    set_properties(array);
  }

  set_map(new_map);

  return this;
}
2437
2438
2439MaybeObject* JSObject::GeneralizeFieldRepresentation(
2440    int modify_index,
2441    Representation new_representation) {
2442  Map* new_map;
2443  MaybeObject* maybe_new_map =
2444      map()->GeneralizeRepresentation(modify_index, new_representation);
2445  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
2446  if (map() == new_map) return this;
2447
2448  return MigrateToMap(new_map);
2449}
2450
2451
2452int Map::NumberOfFields() {
2453  DescriptorArray* descriptors = instance_descriptors();
2454  int result = 0;
2455  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
2456    if (descriptors->GetDetails(i).type() == FIELD) result++;
2457  }
2458  return result;
2459}
2460
2461
2462MaybeObject* Map::CopyGeneralizeAllRepresentations() {
2463  Map* new_map;
2464  MaybeObject* maybe_map = this->Copy();
2465  if (!maybe_map->To(&new_map)) return maybe_map;
2466
2467  new_map->instance_descriptors()->InitializeRepresentations(
2468      Representation::Tagged());
2469  if (FLAG_trace_generalization) {
2470    PrintF("failed generalization %p -> %p\n",
2471           static_cast<void*>(this), static_cast<void*>(new_map));
2472  }
2473  return new_map;
2474}
2475
2476
2477void Map::DeprecateTransitionTree() {
2478  if (!FLAG_track_fields) return;
2479  if (is_deprecated()) return;
2480  if (HasTransitionArray()) {
2481    TransitionArray* transitions = this->transitions();
2482    for (int i = 0; i < transitions->number_of_transitions(); i++) {
2483      transitions->GetTarget(i)->DeprecateTransitionTree();
2484    }
2485  }
2486  deprecate();
2487  dependent_code()->DeoptimizeDependentCodeGroup(
2488      GetIsolate(), DependentCode::kTransitionGroup);
2489  NotifyLeafMapLayoutChange();
2490}
2491
2492
// Invalidates a transition target at |key|, and installs |new_descriptors| over
// the current instance_descriptors to ensure proper sharing of descriptor
// arrays.
void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    int transition = transitions->Search(key);
    if (transition != TransitionArray::kNotFound) {
      // Deprecate the whole subtree rooted at the outdated target.
      transitions->GetTarget(transition)->DeprecateTransitionTree();
    }
  }

  // Don't overwrite the empty descriptor array.
  if (NumberOfOwnDescriptors() == 0) return;

  // Walk the back-pointer chain, replacing the descriptor array on every map
  // that currently shares it, so all of them see |new_descriptors|.
  DescriptorArray* to_replace = instance_descriptors();
  Map* current = this;
  while (current->instance_descriptors() == to_replace) {
    // The enum cache refers to the old descriptors and is now stale.
    current->SetEnumLength(Map::kInvalidEnumCache);
    current->set_instance_descriptors(new_descriptors);
    Object* next = current->GetBackPointer();
    if (next->IsUndefined()) break;  // Reached the root of the tree.
    current = Map::cast(next);
  }

  set_owns_descriptors(false);
}
2520
2521
2522Map* Map::FindRootMap() {
2523  Map* result = this;
2524  while (true) {
2525    Object* back = result->GetBackPointer();
2526    if (back->IsUndefined()) return result;
2527    result = Map::cast(back);
2528  }
2529}
2530
2531
// Returns NULL if the updated map is incompatible.
// Starting from this root map, follows transitions along the keys of
// |descriptors| in the range [verbatim, length) as far as they exist. The
// walk stops early when a transition is missing; it fails (returns NULL)
// when a visited descriptor's attributes differ, or when a CALLBACKS
// descriptor does not match exactly.
Map* Map::FindUpdatedMap(int verbatim,
                         int length,
                         DescriptorArray* descriptors) {
  // This can only be called on roots of transition trees.
  ASSERT(GetBackPointer()->IsUndefined());

  Map* current = this;

  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;
    current = transitions->GetTarget(transition);
    // Differing attributes make the maps incompatible.
    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails target_details =
        current->instance_descriptors()->GetDetails(i);
    if (details.attributes() != target_details.attributes()) return NULL;
    if (details.type() == CALLBACKS) {
      // Accessors are only compatible when they are pairwise identical.
      if (target_details.type() != CALLBACKS) return NULL;
      if (descriptors->GetValue(i) !=
              current->instance_descriptors()->GetValue(i)) {
        return NULL;
      }
    }
  }

  return current;
}
2563
2564
// Starting from this root map, follows transitions along the keys of
// |descriptors| in [verbatim, length) and returns the last map reached whose
// own descriptor at each step matches |descriptors| in value, type,
// attributes and representation. Used to find the split point between an
// existing transition tree and a merged descriptor array.
Map* Map::FindLastMatchMap(int verbatim,
                           int length,
                           DescriptorArray* descriptors) {
  // This can only be called on roots of transition trees.
  ASSERT(GetBackPointer()->IsUndefined());

  Map* current = this;

  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;

    Map* next = transitions->GetTarget(transition);
    DescriptorArray* next_descriptors = next->instance_descriptors();

    // The target only matches when the full descriptor agrees: value, type,
    // attributes and representation.
    if (next_descriptors->GetValue(i) != descriptors->GetValue(i)) break;

    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails next_details = next_descriptors->GetDetails(i);
    if (details.type() != next_details.type()) break;
    if (details.attributes() != next_details.attributes()) break;
    if (!details.representation().Equals(next_details.representation())) break;

    current = next;
  }
  return current;
}
2595
2596
// Generalize the representation of the descriptor at |modify_index|.
// This method rewrites the transition tree to reflect the new change. To avoid
// high degrees over polymorphism, and to stabilize quickly, on every rewrite
// the new type is deduced by merging the current type with any potential new
// (partial) version of the type in the transition tree.
// To do this, on each rewrite:
// - Search the root of the transition tree using FindRootMap.
// - Find |updated|, the newest matching version of this map using
//   FindUpdatedMap. This uses the keys in the own map's descriptor array to
//   walk the transition tree.
// - Merge/generalize the descriptor array of the current map and |updated|.
// - Generalize the |modify_index| descriptor using |new_representation|.
// - Walk the tree again starting from the root towards |updated|. Stop at
//   |split_map|, the first map who's descriptor array does not match the merged
//   descriptor array.
// - If |updated| == |split_map|, |updated| is in the expected state. Return it.
// - Otherwise, invalidate the outdated transition target from |updated|, and
//   replace its transition tree with a new branch for the updated descriptors.
MaybeObject* Map::GeneralizeRepresentation(int modify_index,
                                           Representation new_representation) {
  Map* old_map = this;
  DescriptorArray* old_descriptors = old_map->instance_descriptors();
  Representation old_representation =
      old_descriptors->GetDetails(modify_index).representation();

  // It's fine to transition from None to anything but double without any
  // modification to the object, because the default uninitialized value for
  // representation None can be overwritten by both smi and tagged values.
  // Doubles, however, would require a box allocation.
  if (old_representation.IsNone() &&
      !new_representation.IsNone() &&
      !new_representation.IsDouble()) {
    if (FLAG_trace_generalization) {
      PrintF("initializing representation %i: %p -> %s\n",
             modify_index,
             static_cast<void*>(this),
             new_representation.Mnemonic());
    }
    // In-place update; no new map needed.
    old_descriptors->SetRepresentation(modify_index, new_representation);
    return old_map;
  }

  int descriptors = old_map->NumberOfOwnDescriptors();
  Map* root_map = old_map->FindRootMap();

  // Check the state of the root map.
  if (!old_map->EquivalentToForTransition(root_map)) {
    // The tree rewrite only works for transition-compatible roots; otherwise
    // fall back to a fully generalized copy.
    return CopyGeneralizeAllRepresentations();
  }

  int verbatim = root_map->NumberOfOwnDescriptors();

  // |updated| is the newest version of this map in the transition tree.
  Map* updated = root_map->FindUpdatedMap(
      verbatim, descriptors, old_descriptors);
  if (updated == NULL) return CopyGeneralizeAllRepresentations();

  DescriptorArray* updated_descriptors = updated->instance_descriptors();

  // If an existing map already generalizes both the old descriptors and the
  // requested representation, simply reuse it.
  int valid = updated->NumberOfOwnDescriptors();
  if (updated_descriptors->IsMoreGeneralThan(
          verbatim, valid, descriptors, old_descriptors)) {
    Representation updated_representation =
        updated_descriptors->GetDetails(modify_index).representation();
    if (new_representation.fits_into(updated_representation)) {
      if (FLAG_trace_generalization &&
          !(modify_index == 0 && new_representation.IsNone())) {
        PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
        PrintF("migrating to existing map %p(%s) -> %p(%s)\n",
               static_cast<void*>(this),
               old_details.representation().Mnemonic(),
               static_cast<void*>(updated),
               updated_representation.Mnemonic());
      }
      return updated;
    }
  }

  // Otherwise compute the merged descriptor array and make sure the
  // |modify_index| entry is at least as general as |new_representation|.
  DescriptorArray* new_descriptors;
  MaybeObject* maybe_descriptors = updated_descriptors->Merge(
      verbatim, valid, descriptors, old_descriptors);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  old_representation =
      new_descriptors->GetDetails(modify_index).representation();
  Representation updated_representation =
      new_representation.generalize(old_representation);
  if (!updated_representation.Equals(old_representation)) {
    new_descriptors->SetRepresentation(modify_index, updated_representation);
  }

  // Find where the existing tree stops matching the merged descriptors.
  Map* split_map = root_map->FindLastMatchMap(
      verbatim, descriptors, new_descriptors);

  int split_descriptors = split_map->NumberOfOwnDescriptors();
  // This is shadowed by |updated_descriptors| being more general than
  // |old_descriptors|.
  ASSERT(descriptors != split_descriptors);

  // Invalidate the outdated branch and install the merged descriptors on the
  // maps that shared the old array.
  int descriptor = split_descriptors;
  split_map->DeprecateTarget(
      old_descriptors->GetKey(descriptor), new_descriptors);

  if (FLAG_trace_generalization &&
      !(modify_index == 0 && new_representation.IsNone())) {
    PrintF("migrating to new map %i: %p(%s) -> %p(%s) (%i steps)\n",
           modify_index,
           static_cast<void*>(this),
           old_representation.Mnemonic(),
           static_cast<void*>(new_descriptors),
           updated_representation.Mnemonic(),
           descriptors - descriptor);
  }

  Map* new_map = split_map;
  // Add missing transitions.
  for (; descriptor < descriptors; descriptor++) {
    MaybeObject* maybe_map = new_map->CopyInstallDescriptors(
        descriptor, new_descriptors);
    if (!maybe_map->To(&new_map)) {
      // Create a handle for the last created map to ensure it stays alive
      // during GC. Its descriptor array is too large, but it will be
      // overwritten during retry anyway.
      Handle<Map>(new_map);
      return maybe_map;
    }
    new_map->set_migration_target(true);
  }

  new_map->set_owns_descriptors(true);
  return new_map;
}
2728
2729
2730Map* Map::CurrentMapForDeprecated() {
2731  DisallowHeapAllocation no_allocation;
2732  if (!is_deprecated()) return this;
2733
2734  DescriptorArray* old_descriptors = instance_descriptors();
2735
2736  int descriptors = NumberOfOwnDescriptors();
2737  Map* root_map = FindRootMap();
2738
2739  // Check the state of the root map.
2740  if (!EquivalentToForTransition(root_map)) return NULL;
2741  int verbatim = root_map->NumberOfOwnDescriptors();
2742
2743  Map* updated = root_map->FindUpdatedMap(
2744      verbatim, descriptors, old_descriptors);
2745  if (updated == NULL) return NULL;
2746
2747  DescriptorArray* updated_descriptors = updated->instance_descriptors();
2748  int valid = updated->NumberOfOwnDescriptors();
2749  if (!updated_descriptors->IsMoreGeneralThan(
2750          verbatim, valid, descriptors, old_descriptors)) {
2751    return NULL;
2752  }
2753
2754  return updated;
2755}
2756
2757
// Stores |value| into |name| through the object's named-property setter
// interceptor. If the interceptor does not handle the store (empty result),
// falls through to SetPropertyPostInterceptor. Returns the stored value, or
// propagates a scheduled exception.
MaybeObject* JSObject::SetPropertyWithInterceptor(
    Name* name,
    Object* value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return value;
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  // Handlify the raw arguments: the embedder callback below may allocate and
  // trigger GC.
  Handle<JSObject> this_handle(this);
  Handle<String> name_handle(String::cast(name));
  Handle<Object> value_handle(value, isolate);
  Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    LOG(isolate, ApiNamedPropertyAccess("interceptor-named-set", this, name));
    PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
    v8::NamedPropertySetter setter =
        v8::ToCData<v8::NamedPropertySetter>(interceptor->setter());
    // Never expose the hole to the embedder; substitute undefined.
    Handle<Object> value_unhole(value->IsTheHole() ?
                                isolate->heap()->undefined_value() :
                                value,
                                isolate);
    v8::Handle<v8::Value> result = args.Call(setter,
                                             v8::Utils::ToLocal(name_handle),
                                             v8::Utils::ToLocal(value_unhole));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    // A non-empty result means the interceptor handled the store.
    if (!result.IsEmpty()) return *value_handle;
  }
  MaybeObject* raw_result =
      this_handle->SetPropertyPostInterceptor(*name_handle,
                                              *value_handle,
                                              attributes,
                                              strict_mode,
                                              PERFORM_EXTENSIBILITY_CHECK);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return raw_result;
}
2795
2796
// Handlified wrapper around the raw JSReceiver::SetProperty below; the
// CALL_HEAP_FUNCTION macro retries the operation on allocation failure.
Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
                                       Handle<Name> key,
                                       Handle<Object> value,
                                       PropertyAttributes attributes,
                                       StrictModeFlag strict_mode) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->SetProperty(*key, *value, attributes, strict_mode),
                     Object);
}
2806
2807
// Like the handlified SetProperty above, but returns the raw MaybeObject:
// CALL_HEAP_FUNCTION_PASS_EXCEPTION retries on allocation failure while
// passing other failures (e.g. exceptions) straight through to the caller.
MaybeObject* JSReceiver::SetPropertyOrFail(
    Handle<JSReceiver> object,
    Handle<Name> key,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode,
    JSReceiver::StoreFromKeyed store_mode) {
  CALL_HEAP_FUNCTION_PASS_EXCEPTION(
      object->GetIsolate(),
      object->SetProperty(*key, *value, attributes, strict_mode, store_mode));
}
2819
2820
2821MaybeObject* JSReceiver::SetProperty(Name* name,
2822                                     Object* value,
2823                                     PropertyAttributes attributes,
2824                                     StrictModeFlag strict_mode,
2825                                     JSReceiver::StoreFromKeyed store_mode) {
2826  LookupResult result(GetIsolate());
2827  LocalLookup(name, &result, true);
2828  if (!result.IsFound()) {
2829    map()->LookupTransition(JSObject::cast(this), name, &result);
2830  }
2831  return SetProperty(&result, name, value, attributes, strict_mode, store_mode);
2832}
2833
2834
// Performs a store through a callback |structure|, which may be a Foreign
// (internal AccessorDescriptor), an ExecutableAccessorInfo (API accessor),
// an AccessorPair (JavaScript accessors), or a DeclaredAccessorInfo.
// Returns the stored value on success, a failure if an exception was thrown,
// and throws a TypeError for missing setters in strict mode.
MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
                                               Name* name,
                                               Object* value,
                                               JSObject* holder,
                                               StrictModeFlag strict_mode) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());
  Handle<Object> value_handle(value, isolate);

  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks.  Eventually foreign
  // callbacks should be phased out.
  if (structure->IsForeign()) {
    // Internal accessor: the foreign wraps a raw AccessorDescriptor.
    AccessorDescriptor* callback =
        reinterpret_cast<AccessorDescriptor*>(
            Foreign::cast(structure)->foreign_address());
    MaybeObject* obj = (callback->setter)(this,  value, callback->data);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (obj->IsFailure()) return obj;
    return *value_handle;
  }

  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(structure);
    if (!data->IsCompatibleReceiver(this)) {
      // The accessor declared a receiver signature this object fails.
      Handle<Object> name_handle(name, isolate);
      Handle<Object> receiver_handle(this, isolate);
      Handle<Object> args[2] = { name_handle, receiver_handle };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
                                           HandleVector(args,
                                                        ARRAY_SIZE(args)));
      return isolate->Throw(*error);
    }
    // TODO(rossberg): Support symbols in the API.
    if (name->IsSymbol()) return value;
    Object* call_obj = data->setter();
    v8::AccessorSetter call_fun = v8::ToCData<v8::AccessorSetter>(call_obj);
    // A NULL setter means the accessor is read-only for the API; silently
    // return the value unchanged.
    if (call_fun == NULL) return value;
    Handle<String> key(String::cast(name));
    LOG(isolate, ApiNamedPropertyAccess("store", this, name));
    PropertyCallbackArguments args(
        isolate, data->data(), this, JSObject::cast(holder));
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value_handle));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return *value_handle;
  }

  if (structure->IsAccessorPair()) {
    // JavaScript accessors: invoke the setter if one is present.
    Object* setter = AccessorPair::cast(structure)->setter();
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
     return SetPropertyWithDefinedSetter(JSReceiver::cast(setter), value);
    } else {
      // No setter: silently ignore in sloppy mode, throw in strict mode.
      if (strict_mode == kNonStrictMode) {
        return value;
      }
      Handle<Name> key(name);
      Handle<Object> holder_handle(holder, isolate);
      Handle<Object> args[2] = { key, holder_handle };
      return isolate->Throw(
          *isolate->factory()->NewTypeError("no_setter_in_callback",
                                            HandleVector(args, 2)));
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) {
    return value;
  }

  UNREACHABLE();
  return NULL;
}
2916
2917
// Calls a JavaScript (or function-proxy) |setter| with |value| and this
// receiver, returning |value| on success or an exception failure if the
// call threw.
MaybeObject* JSReceiver::SetPropertyWithDefinedSetter(JSReceiver* setter,
                                                      Object* value) {
  Isolate* isolate = GetIsolate();
  // Handlify before calling out to JavaScript, which may trigger GC.
  Handle<Object> value_handle(value, isolate);
  Handle<JSReceiver> fun(setter, isolate);
  Handle<JSReceiver> self(this, isolate);
#ifdef ENABLE_DEBUGGER_SUPPORT
  Debug* debug = isolate->debug();
  // Handle stepping into a setter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && fun->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(fun), Handle<Object>::null(), 0, false);
  }
#endif
  bool has_pending_exception;
  Handle<Object> argv[] = { value_handle };
  Execution::Call(fun, self, ARRAY_SIZE(argv), argv, &has_pending_exception);
  // Check for pending exception and return the result.
  if (has_pending_exception) return Failure::Exception();
  return *value_handle;
}
2940
2941
// Searches the prototype chain for something that can handle a store to
// element |index|: a proxy, or a dictionary-elements object with a CALLBACKS
// entry for that index. Sets |*found| to true and performs the store when a
// handler exists; otherwise sets |*found| to false and returns the hole so
// the caller can do a normal store.
MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
    uint32_t index,
    Object* value,
    bool* found,
    StrictModeFlag strict_mode) {
  Heap* heap = GetHeap();
  for (Object* pt = GetPrototype();
       pt != heap->null_value();
       pt = pt->GetPrototype(GetIsolate())) {
    if (pt->IsJSProxy()) {
      // Proxies intercept the store; the element index is converted to a
      // string property name first.
      String* name;
      MaybeObject* maybe = heap->Uint32ToString(index);
      if (!maybe->To<String>(&name)) {
        *found = true;  // Force abort
        return maybe;
      }
      return JSProxy::cast(pt)->SetPropertyViaPrototypesWithHandler(
          this, name, value, NONE, strict_mode, found);
    }
    // Only dictionary-mode elements can hold accessor entries.
    if (!JSObject::cast(pt)->HasDictionaryElements()) {
      continue;
    }
    SeededNumberDictionary* dictionary =
        JSObject::cast(pt)->element_dictionary();
    int entry = dictionary->FindEntry(index);
    if (entry != SeededNumberDictionary::kNotFound) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.type() == CALLBACKS) {
        *found = true;
        return SetElementWithCallback(dictionary->ValueAt(entry),
                                      index,
                                      value,
                                      JSObject::cast(pt),
                                      strict_mode);
      }
    }
  }
  *found = false;
  return heap->the_hole_value();
}
2982
// Attempts to handle a store of |name| via the prototype chain: accessors and
// proxy handlers perform the store; read-only properties (when es5_readonly
// is enabled) make the store fail. Sets |*done| to true when the store was
// fully handled here; returns the hole when the caller should proceed with a
// normal local store.
MaybeObject* JSObject::SetPropertyViaPrototypes(
    Name* name,
    Object* value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode,
    bool* done) {
  Heap* heap = GetHeap();
  Isolate* isolate = heap->isolate();

  *done = false;
  // We could not find a local property so let's check whether there is an
  // accessor that wants to handle the property, or whether the property is
  // read-only on the prototype chain.
  LookupResult result(isolate);
  LookupRealNamedPropertyInPrototypes(name, &result);
  if (result.IsFound()) {
    switch (result.type()) {
      case NORMAL:
      case FIELD:
      case CONSTANT:
        // Plain data property: only read-only-ness can block the store.
        *done = result.IsReadOnly();
        break;
      case INTERCEPTOR: {
        // Ask the interceptor for the property's attributes to detect
        // read-only properties.
        PropertyAttributes attr =
            result.holder()->GetPropertyAttributeWithInterceptor(
                this, name, true);
        *done = !!(attr & READ_ONLY);
        break;
      }
      case CALLBACKS: {
        if (!FLAG_es5_readonly && result.IsReadOnly()) break;
        *done = true;
        return SetPropertyWithCallback(result.GetCallbackObject(),
            name, value, result.holder(), strict_mode);
      }
      case HANDLER: {
        // A proxy on the chain takes over the whole store.
        return result.proxy()->SetPropertyViaPrototypesWithHandler(
            this, name, value, attributes, strict_mode, done);
      }
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
        break;
    }
  }

  // If we get here with *done true, we have encountered a read-only property.
  if (!FLAG_es5_readonly) *done = false;
  if (*done) {
    // Sloppy mode silently ignores the store; strict mode throws.
    if (strict_mode == kNonStrictMode) return value;
    Handle<Object> args[] = { Handle<Object>(name, isolate),
                              Handle<Object>(this, isolate)};
    return isolate->Throw(*isolate->factory()->NewTypeError(
      "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))));
  }
  return heap->the_hole_value();
}
3040
3041
3042void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3043  Handle<DescriptorArray> descriptors(map->instance_descriptors());
3044  if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3045  int number_of_descriptors = descriptors->number_of_descriptors();
3046  Isolate* isolate = map->GetIsolate();
3047  Handle<DescriptorArray> new_descriptors =
3048      isolate->factory()->NewDescriptorArray(number_of_descriptors, slack);
3049  DescriptorArray::WhitenessWitness witness(*new_descriptors);
3050
3051  for (int i = 0; i < number_of_descriptors; ++i) {
3052    new_descriptors->CopyFrom(i, *descriptors, i, witness);
3053  }
3054
3055  map->set_instance_descriptors(*new_descriptors);
3056}
3057
3058
// Appends the AccessorInfo entries in |descriptors| (a NeanderArray of
// callbacks) to |map|'s descriptor array, skipping entries whose name already
// has a descriptor. Assumes enough descriptor slack was reserved beforehand
// (see the ASSERT below).
void Map::AppendCallbackDescriptors(Handle<Map> map,
                                    Handle<Object> descriptors) {
  Isolate* isolate = map->GetIsolate();
  Handle<DescriptorArray> array(map->instance_descriptors());
  NeanderArray callbacks(descriptors);
  int nof_callbacks = callbacks.length();

  ASSERT(array->NumberOfSlackDescriptors() >= nof_callbacks);

  // Ensure the keys are unique names before writing them into the
  // instance descriptor. Since it may cause a GC, it has to be done before we
  // temporarily put the heap in an invalid state while appending descriptors.
  for (int i = 0; i < nof_callbacks; ++i) {
    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks.get(i)));
    if (!entry->name()->IsUniqueName()) {
      Handle<String> key =
          isolate->factory()->InternalizedStringFromString(
              Handle<String>(String::cast(entry->name())));
      entry->set_name(*key);
    }
  }

  int nof = map->NumberOfOwnDescriptors();

  // Fill in new callback descriptors.  Process the callbacks from
  // back to front so that the last callback with a given name takes
  // precedence over previously added callbacks with that name.
  for (int i = nof_callbacks - 1; i >= 0; i--) {
    AccessorInfo* entry = AccessorInfo::cast(callbacks.get(i));
    Name* key = Name::cast(entry->name());
    // Check if a descriptor with this name already exists before writing.
    if (array->Search(key, nof) == DescriptorArray::kNotFound) {
      CallbacksDescriptor desc(key, entry, entry->property_attributes());
      array->Append(&desc);
      nof += 1;
    }
  }

  map->SetNumberOfOwnDescriptors(nof);
}
3099
3100
3101static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3102  ASSERT(!map.is_null());
3103  for (int i = 0; i < maps->length(); ++i) {
3104    if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
3105  }
3106  return false;
3107}
3108
3109
3110template <class T>
3111static Handle<T> MaybeNull(T* p) {
3112  if (p == NULL) return Handle<T>::null();
3113  return Handle<T>(p);
3114}
3115
3116
// Among |candidates|, finds the map with the most general elements kind that
// is reachable from this map via elements-kind transitions. When this map's
// kind is holey, packed candidates are skipped (a packed map only accepts
// packed or, once broken, holey successors). Returns the null handle when no
// candidate is reachable.
Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
  ElementsKind kind = elements_kind();
  Handle<Map> transitioned_map = Handle<Map>::null();
  Handle<Map> current_map(this);
  bool packed = IsFastPackedElementsKind(kind);
  if (IsTransitionableFastElementsKind(kind)) {
    // Walk the chain of increasingly general fast elements kinds, remembering
    // the most general candidate seen.
    while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
      kind = GetNextMoreGeneralFastElementsKind(kind, false);
      Handle<Map> maybe_transitioned_map =
          MaybeNull(current_map->LookupElementsTransitionMap(kind));
      if (maybe_transitioned_map.is_null()) break;
      if (ContainsMap(candidates, maybe_transitioned_map) &&
          (packed || !IsFastPackedElementsKind(kind))) {
        transitioned_map = maybe_transitioned_map;
        // Once we accept a holey kind, stop considering packed candidates.
        if (!IsFastPackedElementsKind(kind)) packed = false;
      }
      current_map = maybe_transitioned_map;
    }
  }
  return transitioned_map;
}
3138
3139
// Walks |map|'s elements transition chain towards |to_kind| and returns the
// closest existing map on the way. For fast target kinds this is the map for
// |to_kind| itself when the full chain exists; for non-fast targets the walk
// goes to the terminal fast kind first, then checks for one direct
// transition out of the fast-kind system.
static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
  Map* current_map = map;
  int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
  int to_index = IsFastElementsKind(to_kind)
      ? GetSequenceIndexFromFastElementsKind(to_kind)
      : GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);

  ASSERT(index <= to_index);

  // Follow the fast-kind chain as far as transitions exist.
  for (; index < to_index; ++index) {
    if (!current_map->HasElementsTransition()) return current_map;
    current_map = current_map->elements_transition_map();
  }
  // For non-fast targets, a single extra transition may lead out of the
  // fast-kind system directly to |to_kind|.
  if (!IsFastElementsKind(to_kind) && current_map->HasElementsTransition()) {
    Map* next_map = current_map->elements_transition_map();
    if (next_map->elements_kind() == to_kind) return next_map;
  }
  ASSERT(IsFastElementsKind(to_kind)
         ? current_map->elements_kind() == to_kind
         : current_map->elements_kind() == TERMINAL_FAST_ELEMENTS_KIND);
  return current_map;
}
3162
3163
3164Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3165  Map* to_map = FindClosestElementsTransition(this, to_kind);
3166  if (to_map->elements_kind() == to_kind) return to_map;
3167  return NULL;
3168}
3169
3170
3171bool Map::IsMapInArrayPrototypeChain() {
3172  Isolate* isolate = GetIsolate();
3173  if (isolate->initial_array_prototype()->map() == this) {
3174    return true;
3175  }
3176
3177  if (isolate->initial_object_prototype()->map() == this) {
3178    return true;
3179  }
3180
3181  return false;
3182}
3183
3184
// Creates (and records as transitions) the chain of maps between |map|'s
// elements kind and |to_kind|, returning the final map for |to_kind| or an
// allocation failure. Non-fast targets get one extra map appended after the
// terminal fast kind.
static MaybeObject* AddMissingElementsTransitions(Map* map,
                                                  ElementsKind to_kind) {
  ASSERT(IsFastElementsKind(map->elements_kind()));
  int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
  int to_index = IsFastElementsKind(to_kind)
      ? GetSequenceIndexFromFastElementsKind(to_kind)
      : GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);

  ASSERT(index <= to_index);

  Map* current_map = map;

  // Create one map per intermediate fast kind, linked by transitions.
  for (; index < to_index; ++index) {
    ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(index + 1);
    MaybeObject* maybe_next_map =
        current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
    if (!maybe_next_map->To(&current_map)) return maybe_next_map;
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  if (!IsFastElementsKind(to_kind)) {
    MaybeObject* maybe_next_map =
        current_map->CopyAsElementsKind(to_kind, INSERT_TRANSITION);
    if (!maybe_next_map->To(&current_map)) return maybe_next_map;
  }

  ASSERT(current_map->elements_kind() == to_kind);
  return current_map;
}
3215
3216
// Handlified wrapper: returns the map |object| should transition to for
// elements kind |to_kind|; CALL_HEAP_FUNCTION retries on allocation failure.
Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
                                               ElementsKind to_kind) {
  Isolate* isolate = object->GetIsolate();
  CALL_HEAP_FUNCTION(isolate,
                     object->GetElementsTransitionMap(isolate, to_kind),
                     Map);
}
3224
3225
3226MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
3227  Map* start_map = map();
3228  ElementsKind from_kind = start_map->elements_kind();
3229
3230  if (from_kind == to_kind) {
3231    return start_map;
3232  }
3233
3234  bool allow_store_transition =
3235      // Only remember the map transition if there is not an already existing
3236      // non-matching element transition.
3237      !start_map->IsUndefined() && !start_map->is_shared() &&
3238      IsFastElementsKind(from_kind);
3239
3240  // Only store fast element maps in ascending generality.
3241  if (IsFastElementsKind(to_kind)) {
3242    allow_store_transition &=
3243        IsTransitionableFastElementsKind(from_kind) &&
3244        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3245  }
3246
3247  if (!allow_store_transition) {
3248    return start_map->CopyAsElementsKind(to_kind, OMIT_TRANSITION);
3249  }
3250
3251  return start_map->AsElementsKind(to_kind);
3252}
3253
3254
3255MaybeObject* Map::AsElementsKind(ElementsKind kind) {
3256  Map* closest_map = FindClosestElementsTransition(this, kind);
3257
3258  if (closest_map->elements_kind() == kind) {
3259    return closest_map;
3260  }
3261
3262  return AddMissingElementsTransitions(closest_map, kind);
3263}
3264
3265
// Looks up |name| as a real (non-interceptor) own property, following a
// global proxy through to its global object. Fills |result| from either the
// fast-mode descriptors or the property dictionary; uninitialized constants
// (hole values) disallow caching of the result.
void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) {
  if (IsJSGlobalProxy()) {
    // Delegate to the proxied global object.
    Object* proto = GetPrototype();
    if (proto->IsNull()) return result->NotFound();
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->LocalLookupRealNamedProperty(name, result);
  }

  if (HasFastProperties()) {
    map()->LookupDescriptor(this, name, result);
    // A property or a map transition was found. We return all of these result
    // types because LocalLookupRealNamedProperty is used when setting
    // properties where map transitions are handled.
    ASSERT(!result->IsFound() ||
           (result->holder() == this && result->IsFastPropertyType()));
    // Disallow caching for uninitialized constants. These can only
    // occur as fields.
    if (result->IsField() &&
        result->IsReadOnly() &&
        RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
      result->DisallowCaching();
    }
    return;
  }

  // Slow path: dictionary-mode properties.
  int entry = property_dictionary()->FindEntry(name);
  if (entry != NameDictionary::kNotFound) {
    Object* value = property_dictionary()->ValueAt(entry);
    if (IsGlobalObject()) {
      // Global properties are stored in cells; deleted entries stay in the
      // dictionary with deleted details.
      PropertyDetails d = property_dictionary()->DetailsAt(entry);
      if (d.IsDeleted()) {
        result->NotFound();
        return;
      }
      value = PropertyCell::cast(value)->value();
    }
    // Make sure to disallow caching for uninitialized constants
    // found in the dictionary-mode objects.
    if (value->IsTheHole()) result->DisallowCaching();
    result->DictionaryResult(this, entry);
    return;
  }

  result->NotFound();
}
3311
3312
3313void JSObject::LookupRealNamedProperty(Name* name, LookupResult* result) {
3314  LocalLookupRealNamedProperty(name, result);
3315  if (result->IsFound()) return;
3316
3317  LookupRealNamedPropertyInPrototypes(name, result);
3318}
3319
3320
3321void JSObject::LookupRealNamedPropertyInPrototypes(Name* name,
3322                                                   LookupResult* result) {
3323  Isolate* isolate = GetIsolate();
3324  Heap* heap = isolate->heap();
3325  for (Object* pt = GetPrototype();
3326       pt != heap->null_value();
3327       pt = pt->GetPrototype(isolate)) {
3328    if (pt->IsJSProxy()) {
3329      return result->HandlerResult(JSProxy::cast(pt));
3330    }
3331    JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
3332    ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
3333    if (result->IsFound()) return;
3334  }
3335  result->NotFound();
3336}
3337
3338
// We only need to deal with CALLBACKS and INTERCEPTORS
// Called after a named-access security check has failed.  The store is
// still permitted when it resolves to an AccessorInfo callback with
// all_can_write(); otherwise the failure is reported to the embedder and
// |value| is returned unchanged (unless an exception was scheduled).
MaybeObject* JSObject::SetPropertyWithFailedAccessCheck(
    LookupResult* result,
    Name* name,
    Object* value,
    bool check_prototype,
    StrictModeFlag strict_mode) {
  if (check_prototype && !result->IsProperty()) {
    LookupRealNamedPropertyInPrototypes(name, result);
  }

  if (result->IsProperty()) {
    if (!result->IsReadOnly()) {
      switch (result->type()) {
        case CALLBACKS: {
          Object* obj = result->GetCallbackObject();
          if (obj->IsAccessorInfo()) {
            AccessorInfo* info = AccessorInfo::cast(obj);
            if (info->all_can_write()) {
              // all_can_write callbacks bypass the access check.
              return SetPropertyWithCallback(result->GetCallbackObject(),
                                             name,
                                             value,
                                             result->holder(),
                                             strict_mode);
            }
          }
          break;
        }
        case INTERCEPTOR: {
          // Try to look up the real named property behind the
          // interceptor.  The only properties that can be written here
          // are callbacks marked ALL_CAN_WRITE on the prototype chain.
          LookupResult r(GetIsolate());
          LookupRealNamedProperty(name, &r);
          if (r.IsProperty()) {
            return SetPropertyWithFailedAccessCheck(&r,
                                                    name,
                                                    value,
                                                    check_prototype,
                                                    strict_mode);
          }
          break;
        }
        default: {
          break;
        }
      }
    }
  }

  // No writable all_can_write callback found: report the failed access
  // to the embedder and return the value.
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> value_handle(value, isolate);
  isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return *value_handle;
}
3395
3396
3397MaybeObject* JSReceiver::SetProperty(LookupResult* result,
3398                                     Name* key,
3399                                     Object* value,
3400                                     PropertyAttributes attributes,
3401                                     StrictModeFlag strict_mode,
3402                                     JSReceiver::StoreFromKeyed store_mode) {
3403  if (result->IsHandler()) {
3404    return result->proxy()->SetPropertyWithHandler(
3405        this, key, value, attributes, strict_mode);
3406  } else {
3407    return JSObject::cast(this)->SetPropertyForResult(
3408        result, key, value, attributes, strict_mode, store_mode);
3409  }
3410}
3411
3412
3413bool JSProxy::HasPropertyWithHandler(Name* name_raw) {
3414  Isolate* isolate = GetIsolate();
3415  HandleScope scope(isolate);
3416  Handle<Object> receiver(this, isolate);
3417  Handle<Object> name(name_raw, isolate);
3418
3419  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3420  if (name->IsSymbol()) return false;
3421
3422  Handle<Object> args[] = { name };
3423  Handle<Object> result = CallTrap(
3424    "has", isolate->derived_has_trap(), ARRAY_SIZE(args), args);
3425  if (isolate->has_pending_exception()) return false;
3426
3427  return result->BooleanValue();
3428}
3429
3430
3431MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandler(
3432    JSReceiver* receiver_raw,
3433    Name* name_raw,
3434    Object* value_raw,
3435    PropertyAttributes attributes,
3436    StrictModeFlag strict_mode) {
3437  Isolate* isolate = GetIsolate();
3438  HandleScope scope(isolate);
3439  Handle<JSReceiver> receiver(receiver_raw);
3440  Handle<Object> name(name_raw, isolate);
3441  Handle<Object> value(value_raw, isolate);
3442
3443  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3444  if (name->IsSymbol()) return *value;
3445
3446  Handle<Object> args[] = { receiver, name, value };
3447  CallTrap("set", isolate->derived_set_trap(), ARRAY_SIZE(args), args);
3448  if (isolate->has_pending_exception()) return Failure::Exception();
3449
3450  return *value;
3451}
3452
3453
// Invoked when a property store on |receiver_raw| reaches this proxy on
// the receiver's prototype chain.  Asks the proxy's
// "getPropertyDescriptor" trap for a descriptor: a data descriptor
// decides the store by its writability, an accessor descriptor routes
// the store through its setter.  *done is set to false when the store
// should instead proceed unhandled on the receiver itself.
MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyViaPrototypesWithHandler(
    JSReceiver* receiver_raw,
    Name* name_raw,
    Object* value_raw,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode,
    bool* done) {
  Isolate* isolate = GetIsolate();
  Handle<JSProxy> proxy(this);
  Handle<JSReceiver> receiver(receiver_raw);
  Handle<Name> name(name_raw);
  Handle<Object> value(value_raw, isolate);
  Handle<Object> handler(this->handler(), isolate);  // Trap might morph proxy.

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) {
    *done = false;
    return isolate->heap()->the_hole_value();
  }

  *done = true;  // except where redefined...
  Handle<Object> args[] = { name };
  Handle<Object> result = proxy->CallTrap(
      "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return Failure::Exception();

  if (result->IsUndefined()) {
    // No descriptor: the proxy does not constrain this store.
    *done = false;
    return isolate->heap()->the_hole_value();
  }

  // Emulate [[GetProperty]] semantics for proxies.
  bool has_pending_exception;
  Handle<Object> argv[] = { result };
  Handle<Object> desc =
      Execution::Call(isolate->to_complete_property_descriptor(), result,
                      ARRAY_SIZE(argv), argv, &has_pending_exception);
  if (has_pending_exception) return Failure::Exception();

  // [[GetProperty]] requires to check that all properties are configurable.
  Handle<String> configurable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("configurable_"));
  Handle<Object> configurable(
      v8::internal::GetProperty(isolate, desc, configurable_name));
  ASSERT(!isolate->has_pending_exception());
  ASSERT(configurable->IsTrue() || configurable->IsFalse());
  if (configurable->IsFalse()) {
    Handle<String> trap =
        isolate->factory()->InternalizeOneByteString(
            STATIC_ASCII_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw(*error);
  }
  ASSERT(configurable->IsTrue());

  // Check for DataDescriptor.
  Handle<String> hasWritable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("hasWritable_"));
  Handle<Object> hasWritable(
      v8::internal::GetProperty(isolate, desc, hasWritable_name));
  ASSERT(!isolate->has_pending_exception());
  ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse());
  if (hasWritable->IsTrue()) {
    Handle<String> writable_name =
        isolate->factory()->InternalizeOneByteString(
            STATIC_ASCII_VECTOR("writable_"));
    Handle<Object> writable(
        v8::internal::GetProperty(isolate, desc, writable_name));
    ASSERT(!isolate->has_pending_exception());
    ASSERT(writable->IsTrue() || writable->IsFalse());
    // A writable data property lets the store continue on the receiver;
    // a non-writable one ends it (TypeError in strict mode).
    *done = writable->IsFalse();
    if (!*done) return GetHeap()->the_hole_value();
    if (strict_mode == kNonStrictMode) return *value;
    Handle<Object> args[] = { name, receiver };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw(*error);
  }

  // We have an AccessorDescriptor.
  Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("set_"));
  Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_name));
  ASSERT(!isolate->has_pending_exception());
  if (!setter->IsUndefined()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return receiver->SetPropertyWithDefinedSetter(
        JSReceiver::cast(*setter), *value);
  }

  // Accessor without a setter: ignore the store in sloppy mode, throw
  // in strict mode.
  if (strict_mode == kNonStrictMode) return *value;
  Handle<Object> args2[] = { name, proxy };
  Handle<Object> error = isolate->factory()->NewTypeError(
      "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
  return isolate->Throw(*error);
}
3554
3555
3556Handle<Object> JSProxy::DeletePropertyWithHandler(
3557    Handle<JSProxy> object, Handle<Name> name, DeleteMode mode) {
3558  Isolate* isolate = object->GetIsolate();
3559
3560  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3561  if (name->IsSymbol()) return isolate->factory()->false_value();
3562
3563  Handle<Object> args[] = { name };
3564  Handle<Object> result = object->CallTrap(
3565      "delete", Handle<Object>(), ARRAY_SIZE(args), args);
3566  if (isolate->has_pending_exception()) return Handle<Object>();
3567
3568  bool result_bool = result->BooleanValue();
3569  if (mode == STRICT_DELETION && !result_bool) {
3570    Handle<Object> handler(object->handler(), isolate);
3571    Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3572        STATIC_ASCII_VECTOR("delete"));
3573    Handle<Object> args[] = { handler, trap_name };
3574    Handle<Object> error = isolate->factory()->NewTypeError(
3575        "handler_failed", HandleVector(args, ARRAY_SIZE(args)));
3576    isolate->Throw(*error);
3577    return Handle<Object>();
3578  }
3579  return isolate->factory()->ToBoolean(result_bool);
3580}
3581
3582
3583Handle<Object> JSProxy::DeleteElementWithHandler(
3584    Handle<JSProxy> object, uint32_t index, DeleteMode mode) {
3585  Isolate* isolate = object->GetIsolate();
3586  Handle<String> name = isolate->factory()->Uint32ToString(index);
3587  return JSProxy::DeletePropertyWithHandler(object, name, mode);
3588}
3589
3590
// Computes the PropertyAttributes of |name_raw| on a proxy by invoking
// the "getPropertyDescriptor" trap and converting the completed
// descriptor into attribute bits.  Returns ABSENT when the trap reports
// no property and NONE when an exception is pending.
MUST_USE_RESULT PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
    JSReceiver* receiver_raw,
    Name* name_raw) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<JSProxy> proxy(this);
  Handle<Object> handler(this->handler(), isolate);  // Trap might morph proxy.
  Handle<JSReceiver> receiver(receiver_raw);
  Handle<Object> name(name_raw, isolate);

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return ABSENT;

  Handle<Object> args[] = { name };
  Handle<Object> result = CallTrap(
    "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return NONE;

  if (result->IsUndefined()) return ABSENT;

  // Normalize the raw trap result into a complete property descriptor.
  bool has_pending_exception;
  Handle<Object> argv[] = { result };
  Handle<Object> desc =
      Execution::Call(isolate->to_complete_property_descriptor(), result,
                      ARRAY_SIZE(argv), argv, &has_pending_exception);
  if (has_pending_exception) return NONE;

  // Convert result to PropertyAttributes.
  Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("enumerable_"));
  Handle<Object> enumerable(v8::internal::GetProperty(isolate, desc, enum_n));
  if (isolate->has_pending_exception()) return NONE;
  Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("configurable_"));
  Handle<Object> configurable(v8::internal::GetProperty(isolate, desc, conf_n));
  if (isolate->has_pending_exception()) return NONE;
  Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("writable_"));
  Handle<Object> writable(v8::internal::GetProperty(isolate, desc, writ_n));
  if (isolate->has_pending_exception()) return NONE;
  if (!writable->BooleanValue()) {
    // An accessor descriptor counts as writable when it has a setter.
    Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("set_"));
    Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_n));
    if (isolate->has_pending_exception()) return NONE;
    writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
  }

  if (configurable->IsFalse()) {
    // Proxies may not report non-configurable properties; throw.
    Handle<String> trap = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return NONE;
  }

  int attributes = NONE;
  if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
  if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
  if (!writable->BooleanValue()) attributes |= READ_ONLY;
  return static_cast<PropertyAttributes>(attributes);
}
3655
3656
3657MUST_USE_RESULT PropertyAttributes JSProxy::GetElementAttributeWithHandler(
3658    JSReceiver* receiver_raw,
3659    uint32_t index) {
3660  Isolate* isolate = GetIsolate();
3661  HandleScope scope(isolate);
3662  Handle<JSProxy> proxy(this);
3663  Handle<JSReceiver> receiver(receiver_raw);
3664  Handle<String> name = isolate->factory()->Uint32ToString(index);
3665  return proxy->GetPropertyAttributeWithHandler(*receiver, *name);
3666}
3667
3668
// Irreversibly converts this proxy in place into an ordinary JSObject
// (or a JSFunction for function proxies), carrying over the identity
// hash if one had been assigned.
void JSProxy::Fix() {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<JSProxy> self(this);

  // Save identity hash.
  // NOTE(review): maybe_hash is a raw MaybeObject* held across the
  // allocating Become* calls below; a present hash is a Smi (checked
  // before use), so it is presumably immune to GC moves -- confirm.
  MaybeObject* maybe_hash = GetIdentityHash(OMIT_CREATION);

  if (IsJSFunctionProxy()) {
    isolate->factory()->BecomeJSFunction(self);
    // Code will be set on the JavaScript side.
  } else {
    isolate->factory()->BecomeJSObject(self);
  }
  ASSERT(self->IsJSObject());

  // Inherit identity, if it was present.
  Object* hash;
  if (maybe_hash->To<Object>(&hash) && hash->IsSmi()) {
    Handle<JSObject> new_self(JSObject::cast(*self));
    isolate->factory()->SetIdentityHash(new_self, Smi::cast(hash));
  }
}
3692
3693
// Fetches the trap function |name| from this proxy's handler and invokes
// it with |argc| arguments.  If the handler does not define the trap,
// the |derived| fallback is used instead; with no fallback either, a
// TypeError is thrown and an empty handle returned.
MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name,
                                                 Handle<Object> derived,
                                                 int argc,
                                                 Handle<Object> argv[]) {
  Isolate* isolate = GetIsolate();
  Handle<Object> handler(this->handler(), isolate);

  Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
  Handle<Object> trap(v8::internal::GetProperty(isolate, handler, trap_name));
  if (isolate->has_pending_exception()) return trap;

  if (trap->IsUndefined()) {
    if (derived.is_null()) {
      // No trap and no derived fallback: the handler is incomplete.
      Handle<Object> args[] = { handler, trap_name };
      Handle<Object> error = isolate->factory()->NewTypeError(
        "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    }
    trap = Handle<Object>(derived);
  }

  bool threw;
  return Execution::Call(trap, handler, argc, argv, &threw);
}
3719
3720
// Handle-based wrapper: delegates to the raw AllocateStorageForMap and
// retries on allocation failure via CALL_HEAP_FUNCTION_VOID.
void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
  CALL_HEAP_FUNCTION_VOID(
      object->GetIsolate(),
      object->AllocateStorageForMap(*map));
}
3726
3727
// Handle-based wrapper around the raw MigrateInstance; traces the
// migration when --trace-migration is enabled, and retries on
// allocation failure via CALL_HEAP_FUNCTION_VOID.
void JSObject::MigrateInstance(Handle<JSObject> object) {
  if (FLAG_trace_migration) {
    PrintF("migrating instance %p (%p)\n",
           static_cast<void*>(*object),
           static_cast<void*>(object->map()));
  }
  CALL_HEAP_FUNCTION_VOID(
      object->GetIsolate(),
      object->MigrateInstance());
}
3738
3739
// Like MigrateInstance above, but returns the result as a handle so the
// caller can observe failure instead of retrying silently.
// NOTE(review): the trace text says "no new maps" yet this calls the
// same raw MigrateInstance as MigrateInstance() -- confirm the intended
// difference is only in failure propagation.
Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) {
  if (FLAG_trace_migration) {
    PrintF("migrating instance (no new maps) %p (%p)\n",
           static_cast<void*>(*object),
           static_cast<void*>(object->map()));
  }
  CALL_HEAP_FUNCTION(
      object->GetIsolate(),
      object->MigrateInstance(),
      Object);
}
3751
3752
// Handle-based wrapper: generalizes the representation of the field at
// |modify_index| to at least |representation|, retrying on allocation
// failure via CALL_HEAP_FUNCTION.
Handle<Map> Map::GeneralizeRepresentation(Handle<Map> map,
                                          int modify_index,
                                          Representation representation) {
  CALL_HEAP_FUNCTION(
      map->GetIsolate(),
      map->GeneralizeRepresentation(modify_index, representation),
      Map);
}
3761
3762
// Core named-property store on a JSObject.  Performs access checks,
// forwards stores on the global proxy to the real global object, gives
// setters/proxies in the prototype chain a chance to intercept, enforces
// read-only properties, then dispatches on the kind of property |lookup|
// found.  Fires Object.observe change records when the map is observed.
MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
                                            Name* name_raw,
                                            Object* value_raw,
                                            PropertyAttributes attributes,
                                            StrictModeFlag strict_mode,
                                            StoreFromKeyed store_mode) {
  Heap* heap = GetHeap();
  Isolate* isolate = heap->isolate();
  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc;

  // Optimization for 2-byte strings often used as keys in a decompression
  // dictionary.  We internalize these short keys to avoid constantly
  // reallocating them.
  if (name_raw->IsString() && !name_raw->IsInternalizedString() &&
      String::cast(name_raw)->length() <= 2) {
    Object* internalized_version;
    { MaybeObject* maybe_string_version =
        heap->InternalizeString(String::cast(name_raw));
      if (maybe_string_version->ToObject(&internalized_version)) {
        name_raw = String::cast(internalized_version);
      }
    }
  }

  // Check access rights if needed.
  if (IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
      return SetPropertyWithFailedAccessCheck(
          lookup, name_raw, value_raw, true, strict_mode);
    }
  }

  if (IsJSGlobalProxy()) {
    // The global proxy holds no own properties; store on the global
    // object behind it instead.
    Object* proto = GetPrototype();
    if (proto->IsNull()) return value_raw;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->SetPropertyForResult(
        lookup, name_raw, value_raw, attributes, strict_mode, store_mode);
  }

  ASSERT(!lookup->IsFound() || lookup->holder() == this ||
         lookup->holder()->map()->is_hidden_prototype());

  // From this point on everything needs to be handlified, because
  // SetPropertyViaPrototypes might call back into JavaScript.
  HandleScope scope(isolate);
  Handle<JSObject> self(this);
  Handle<Name> name(name_raw);
  Handle<Object> value(value_raw, isolate);

  // Give setters and proxies on the prototype chain a chance to handle
  // the store before a new own property is added.
  if (!lookup->IsProperty() && !self->IsJSContextExtensionObject()) {
    bool done = false;
    MaybeObject* result_object = self->SetPropertyViaPrototypes(
        *name, *value, attributes, strict_mode, &done);
    if (done) return result_object;
  }

  if (!lookup->IsFound()) {
    // Neither properties nor transitions found.
    return self->AddProperty(
        *name, *value, attributes, strict_mode, store_mode);
  }

  if (lookup->IsProperty() && lookup->IsReadOnly()) {
    // Writing a read-only property throws in strict mode and is
    // silently ignored otherwise.
    if (strict_mode == kStrictMode) {
      Handle<Object> args[] = { name, self };
      return isolate->Throw(*isolate->factory()->NewTypeError(
          "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))));
    } else {
      return *value;
    }
  }

  // Capture the old value for Object.observe before it is overwritten.
  Handle<Object> old_value(heap->the_hole_value(), isolate);
  if (FLAG_harmony_observation &&
      map()->is_observed() && lookup->IsDataProperty()) {
    old_value = Object::GetProperty(self, name);
  }

  // This is a real property that is not read-only, or it is a
  // transition or null descriptor and there are no setters in the prototypes.
  MaybeObject* result = *value;
  switch (lookup->type()) {
    case NORMAL:
      // Dictionary-mode (slow) property.
      result = lookup->holder()->SetNormalizedProperty(lookup, *value);
      break;
    case FIELD: {
      // Fast property; the field's representation may need widening to
      // fit the new value.
      Representation representation = lookup->representation();
      if (!value->FitsRepresentation(representation)) {
        MaybeObject* maybe_failure =
            lookup->holder()->GeneralizeFieldRepresentation(
                lookup->GetDescriptorIndex(), value->OptimalRepresentation());
        if (maybe_failure->IsFailure()) return maybe_failure;
        DescriptorArray* desc = lookup->holder()->map()->instance_descriptors();
        int descriptor = lookup->GetDescriptorIndex();
        representation = desc->GetDetails(descriptor).representation();
      }
      if (FLAG_track_double_fields && representation.IsDouble()) {
        // Double fields live in a heap-allocated box; mutate it in place
        // rather than replacing the field value.
        HeapNumber* storage =
            HeapNumber::cast(lookup->holder()->RawFastPropertyAt(
                lookup->GetFieldIndex().field_index()));
        storage->set_value(value->Number());
        result = *value;
        break;
      }
      lookup->holder()->FastPropertyAtPut(
          lookup->GetFieldIndex().field_index(), *value);
      result = *value;
      break;
    }
    case CONSTANT:
      // Only replace the constant if necessary.
      if (*value == lookup->GetConstant()) return *value;
      // Preserve the attributes of this existing property.
      attributes = lookup->GetAttributes();
      result = lookup->holder()->ConvertDescriptorToField(
          *name, *value, attributes);
      break;
    case CALLBACKS: {
      Object* callback_object = lookup->GetCallbackObject();
      return self->SetPropertyWithCallback(
          callback_object, *name, *value, lookup->holder(), strict_mode);
    }
    case INTERCEPTOR:
      result = lookup->holder()->SetPropertyWithInterceptor(
          *name, *value, attributes, strict_mode);
      break;
    case TRANSITION: {
      // A map transition whose target map already describes this
      // property as its last-added descriptor.
      Map* transition_map = lookup->GetTransitionTarget();
      int descriptor = transition_map->LastAdded();

      DescriptorArray* descriptors = transition_map->instance_descriptors();
      PropertyDetails details = descriptors->GetDetails(descriptor);

      if (details.type() == FIELD) {
        if (attributes == details.attributes()) {
          Representation representation = details.representation();
          if (!value->FitsRepresentation(representation)) {
            MaybeObject* maybe_map = transition_map->GeneralizeRepresentation(
                descriptor, value->OptimalRepresentation());
            if (!maybe_map->To(&transition_map)) return maybe_map;
            Object* back = transition_map->GetBackPointer();
            if (back->IsMap()) {
              // Bring the holder up to date with the generalized map's
              // parent before taking the transition.
              MaybeObject* maybe_failure =
                  lookup->holder()->MigrateToMap(Map::cast(back));
              if (maybe_failure->IsFailure()) return maybe_failure;
            }
            descriptors = transition_map->instance_descriptors();
            representation =
                descriptors->GetDetails(descriptor).representation();
          }
          int field_index = descriptors->GetFieldIndex(descriptor);
          result = lookup->holder()->AddFastPropertyUsingMap(
              transition_map, *name, *value, field_index, representation);
        } else {
          result = lookup->holder()->ConvertDescriptorToField(
              *name, *value, attributes);
        }
      } else if (details.type() == CALLBACKS) {
        result = lookup->holder()->ConvertDescriptorToField(
            *name, *value, attributes);
      } else {
        ASSERT(details.type() == CONSTANT);

        Object* constant = descriptors->GetValue(descriptor);
        if (constant == *value) {
          // If the same constant function is being added we can simply
          // transition to the target map.
          lookup->holder()->set_map(transition_map);
          result = constant;
        } else {
          // Otherwise, replace with a map transition to a new map with a FIELD,
          // even if the value is a constant function.
          result = lookup->holder()->ConvertTransitionToMapTransition(
              lookup->GetTransitionIndex(), *name, *value, attributes);
        }
      }
      break;
    }
    case HANDLER:
    case NONEXISTENT:
      UNREACHABLE();
  }

  Handle<Object> hresult;
  if (!result->ToHandle(&hresult, isolate)) return result;

  // Deliver Object.observe change records for the completed store.
  if (FLAG_harmony_observation && self->map()->is_observed()) {
    if (lookup->IsTransition()) {
      EnqueueChangeRecord(self, "new", name, old_value);
    } else {
      LookupResult new_lookup(isolate);
      self->LocalLookup(*name, &new_lookup, true);
      if (new_lookup.IsDataProperty()) {
        Handle<Object> new_value = Object::GetProperty(self, name);
        if (!new_value->SameValue(*old_value)) {
          EnqueueChangeRecord(self, "updated", name, old_value);
        }
      }
    }
  }

  return *hresult;
}
3969
3970
3971// Set a real local property, even if it is READ_ONLY.  If the property is not
3972// present, add it with attributes NONE.  This code is an exact clone of
// SetProperty, with the check for IsReadOnly and the check for a
// callback setter removed.  The two lines that look up the LookupResult
// are an addition.  If one of the functions is changed, the other
// should be changed accordingly.
3977// Note that this method cannot be used to set the prototype of a function
3978// because ConvertDescriptorToField() which is called in "case CALLBACKS:"
3979// doesn't handle function prototypes correctly.
Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
    Handle<JSObject> object,
    Handle<Name> key,
    Handle<Object> value,
    PropertyAttributes attributes,
    ValueType value_type,
    StoreMode mode) {
  // Handle-based wrapper: delegates to the raw-pointer version and
  // retries on allocation failure via CALL_HEAP_FUNCTION.
  CALL_HEAP_FUNCTION(
    object->GetIsolate(),
    object->SetLocalPropertyIgnoreAttributes(
        *key, *value, attributes, value_type, mode),
    Object);
}
3993
3994
3995MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
3996    Name* name_raw,
3997    Object* value_raw,
3998    PropertyAttributes attributes,
3999    ValueType value_type,
4000    StoreMode mode) {
4001  // Make sure that the top context does not change when doing callbacks or
4002  // interceptor calls.
4003  AssertNoContextChange ncc;
4004  Isolate* isolate = GetIsolate();
4005  LookupResult lookup(isolate);
4006  LocalLookup(name_raw, &lookup, true);
4007  if (!lookup.IsFound()) map()->LookupTransition(this, name_raw, &lookup);
4008  // Check access rights if needed.
4009  if (IsAccessCheckNeeded()) {
4010    if (!isolate->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
4011      return SetPropertyWithFailedAccessCheck(&lookup,
4012                                              name_raw,
4013                                              value_raw,
4014                                              false,
4015                                              kNonStrictMode);
4016    }
4017  }
4018
4019  if (IsJSGlobalProxy()) {
4020    Object* proto = GetPrototype();
4021    if (proto->IsNull()) return value_raw;
4022    ASSERT(proto->IsJSGlobalObject());
4023    return JSObject::cast(proto)->SetLocalPropertyIgnoreAttributes(
4024        name_raw,
4025        value_raw,
4026        attributes,
4027        value_type,
4028        mode);
4029  }
4030
4031  // Check for accessor in prototype chain removed here in clone.
4032  if (!lookup.IsFound()) {
4033    // Neither properties nor transitions found.
4034    return AddProperty(
4035        name_raw, value_raw, attributes, kNonStrictMode,
4036        MAY_BE_STORE_FROM_KEYED, PERFORM_EXTENSIBILITY_CHECK, value_type, mode);
4037  }
4038
4039  // From this point on everything needs to be handlified.
4040  HandleScope scope(isolate);
4041  Handle<JSObject> self(this);
4042  Handle<Name> name(name_raw);
4043  Handle<Object> value(value_raw, isolate);
4044
4045  Handle<Object> old_value(isolate->heap()->the_hole_value(), isolate);
4046  PropertyAttributes old_attributes = ABSENT;
4047  bool is_observed = FLAG_harmony_observation && self->map()->is_observed();
4048  if (is_observed && lookup.IsProperty()) {
4049    if (lookup.IsDataProperty()) old_value = Object::GetProperty(self, name);
4050    old_attributes = lookup.GetAttributes();
4051  }
4052
4053  // Check of IsReadOnly removed from here in clone.
4054  MaybeObject* result = *value;
4055  switch (lookup.type()) {
4056    case NORMAL: {
4057      PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
4058      result = self->SetNormalizedProperty(*name, *value, details);
4059      break;
4060    }
4061    case FIELD: {
4062      Representation representation = lookup.representation();
4063      Representation value_representation =
4064          value->OptimalRepresentation(value_type);
4065      if (value_representation.IsNone()) break;
4066      if (!value_representation.fits_into(representation)) {
4067        MaybeObject* maybe_failure = self->GeneralizeFieldRepresentation(
4068            lookup.GetDescriptorIndex(), value_representation);
4069        if (maybe_failure->IsFailure()) return maybe_failure;
4070        DescriptorArray* desc = self->map()->instance_descriptors();
4071        int descriptor = lookup.GetDescriptorIndex();
4072        representation = desc->GetDetails(descriptor).representation();
4073      }
4074      if (FLAG_track_double_fields && representation.IsDouble()) {
4075        HeapNumber* storage =
4076            HeapNumber::cast(self->RawFastPropertyAt(
4077                lookup.GetFieldIndex().field_index()));
4078        storage->set_value(value->Number());
4079        result = *value;
4080        break;
4081      }
4082      self->FastPropertyAtPut(lookup.GetFieldIndex().field_index(), *value);
4083      result = *value;
4084      break;
4085    }
4086    case CONSTANT:
4087      // Only replace the function if necessary.
4088      if (*value != lookup.GetConstant()) {
4089        // Preserve the attributes of this existing property.
4090        attributes = lookup.GetAttributes();
4091        result = self->ConvertDescriptorToField(*name, *value, attributes);
4092      }
4093      break;
4094    case CALLBACKS:
4095    case INTERCEPTOR:
4096      // Override callback in clone
4097      result = self->ConvertDescriptorToField(*name, *value, attributes);
4098      break;
4099    case TRANSITION: {
4100      Map* transition_map = lookup.GetTransitionTarget();
4101      int descriptor = transition_map->LastAdded();
4102
4103      DescriptorArray* descriptors = transition_map->instance_descriptors();
4104      PropertyDetails details = descriptors->GetDetails(descriptor);
4105
4106      if (details.type() == FIELD) {
4107        if (attributes == details.attributes()) {
4108          Representation representation = details.representation();
4109          Representation value_representation =
4110              value->OptimalRepresentation(value_type);
4111          if (!value_representation.fits_into(representation)) {
4112            MaybeObject* maybe_map = transition_map->GeneralizeRepresentation(
4113                descriptor, value_representation);
4114            if (!maybe_map->To(&transition_map)) return maybe_map;
4115            Object* back = transition_map->GetBackPointer();
4116            if (back->IsMap()) {
4117              MaybeObject* maybe_failure = self->MigrateToMap(Map::cast(back));
4118              if (maybe_failure->IsFailure()) return maybe_failure;
4119            }
4120            descriptors = transition_map->instance_descriptors();
4121            representation =
4122                descriptors->GetDetails(descriptor).representation();
4123          }
4124          int field_index = descriptors->GetFieldIndex(descriptor);
4125          result = self->AddFastPropertyUsingMap(
4126              transition_map, *name, *value, field_index, representation);
4127        } else {
4128          result = self->ConvertDescriptorToField(*name, *value, attributes);
4129        }
4130      } else if (details.type() == CALLBACKS) {
4131        result = self->ConvertDescriptorToField(*name, *value, attributes);
4132      } else {
4133        ASSERT(details.type() == CONSTANT);
4134
4135        // Replace transition to CONSTANT FUNCTION with a map transition to a
4136        // new map with a FIELD, even if the value is a function.
4137        result = self->ConvertTransitionToMapTransition(
4138            lookup.GetTransitionIndex(), *name, *value, attributes);
4139      }
4140      break;
4141    }
4142    case HANDLER:
4143    case NONEXISTENT:
4144      UNREACHABLE();
4145  }
4146
4147  Handle<Object> hresult;
4148  if (!result->ToHandle(&hresult, isolate)) return result;
4149
4150  if (is_observed) {
4151    if (lookup.IsTransition()) {
4152      EnqueueChangeRecord(self, "new", name, old_value);
4153    } else if (old_value->IsTheHole()) {
4154      EnqueueChangeRecord(self, "reconfigured", name, old_value);
4155    } else {
4156      LookupResult new_lookup(isolate);
4157      self->LocalLookup(*name, &new_lookup, true);
4158      bool value_changed = false;
4159      if (new_lookup.IsDataProperty()) {
4160        Handle<Object> new_value = Object::GetProperty(self, name);
4161        value_changed = !old_value->SameValue(*new_value);
4162      }
4163      if (new_lookup.GetAttributes() != old_attributes) {
4164        if (!value_changed) old_value = isolate->factory()->the_hole_value();
4165        EnqueueChangeRecord(self, "reconfigured", name, old_value);
4166      } else if (value_changed) {
4167        EnqueueChangeRecord(self, "updated", name, old_value);
4168      }
4169    }
4170  }
4171
4172  return *hresult;
4173}
4174
4175
4176PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
4177      JSObject* receiver,
4178      Name* name,
4179      bool continue_search) {
4180  // Check local property, ignore interceptor.
4181  LookupResult result(GetIsolate());
4182  LocalLookupRealNamedProperty(name, &result);
4183  if (result.IsFound()) return result.GetAttributes();
4184
4185  if (continue_search) {
4186    // Continue searching via the prototype chain.
4187    Object* pt = GetPrototype();
4188    if (!pt->IsNull()) {
4189      return JSObject::cast(pt)->
4190        GetPropertyAttributeWithReceiver(receiver, name);
4191    }
4192  }
4193  return ABSENT;
4194}
4195
4196
// Computes the attributes of a named property via this object's named
// interceptor: the interceptor's query callback is preferred; otherwise a
// successful getter call is reported as DONT_ENUM; finally falls back to a
// post-interceptor (real property / prototype chain) lookup.
PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
      JSObject* receiver,
      Name* name,
      bool continue_search) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return ABSENT;

  Isolate* isolate = GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc;

  // Handlify everything the callback might cause to move during GC.
  HandleScope scope(isolate);
  Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
  Handle<JSObject> receiver_handle(receiver);
  Handle<JSObject> holder_handle(this);
  Handle<String> name_handle(String::cast(name));
  PropertyCallbackArguments args(isolate, interceptor->data(), receiver, this);
  if (!interceptor->query()->IsUndefined()) {
    // Preferred path: the query callback returns the attributes directly.
    v8::NamedPropertyQuery query =
        v8::ToCData<v8::NamedPropertyQuery>(interceptor->query());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-has", *holder_handle, name));
    v8::Handle<v8::Integer> result =
        args.Call(query, v8::Utils::ToLocal(name_handle));
    if (!result.IsEmpty()) {
      ASSERT(result->IsInt32());
      return static_cast<PropertyAttributes>(result->Int32Value());
    }
  } else if (!interceptor->getter()->IsUndefined()) {
    // No query callback: a getter that produces a value proves existence,
    // but the attributes are unknown, so report DONT_ENUM.
    v8::NamedPropertyGetter getter =
        v8::ToCData<v8::NamedPropertyGetter>(interceptor->getter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-get-has", this, name));
    v8::Handle<v8::Value> result =
        args.Call(getter, v8::Utils::ToLocal(name_handle));
    if (!result.IsEmpty()) return DONT_ENUM;
  }
  // Interceptor did not answer; fall back to the real-property lookup
  // (use the handles — the callbacks above may have triggered GC).
  return holder_handle->GetPropertyAttributePostInterceptor(*receiver_handle,
                                                            *name_handle,
                                                            continue_search);
}
4240
4241
4242PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver(
4243      JSReceiver* receiver,
4244      Name* key) {
4245  uint32_t index = 0;
4246  if (IsJSObject() && key->AsArrayIndex(&index)) {
4247    return JSObject::cast(this)->GetElementAttributeWithReceiver(
4248        receiver, index, true);
4249  }
4250  // Named property.
4251  LookupResult lookup(GetIsolate());
4252  Lookup(key, &lookup);
4253  return GetPropertyAttributeForResult(receiver, &lookup, key, true);
4254}
4255
4256
4257PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
4258    JSReceiver* receiver,
4259    LookupResult* lookup,
4260    Name* name,
4261    bool continue_search) {
4262  // Check access rights if needed.
4263  if (IsAccessCheckNeeded()) {
4264    JSObject* this_obj = JSObject::cast(this);
4265    Heap* heap = GetHeap();
4266    if (!heap->isolate()->MayNamedAccess(this_obj, name, v8::ACCESS_HAS)) {
4267      return this_obj->GetPropertyAttributeWithFailedAccessCheck(
4268          receiver, lookup, name, continue_search);
4269    }
4270  }
4271  if (lookup->IsFound()) {
4272    switch (lookup->type()) {
4273      case NORMAL:  // fall through
4274      case FIELD:
4275      case CONSTANT:
4276      case CALLBACKS:
4277        return lookup->GetAttributes();
4278      case HANDLER: {
4279        return JSProxy::cast(lookup->proxy())->GetPropertyAttributeWithHandler(
4280            receiver, name);
4281      }
4282      case INTERCEPTOR:
4283        return lookup->holder()->GetPropertyAttributeWithInterceptor(
4284            JSObject::cast(receiver), name, continue_search);
4285      case TRANSITION:
4286      case NONEXISTENT:
4287        UNREACHABLE();
4288    }
4289  }
4290  return ABSENT;
4291}
4292
4293
4294PropertyAttributes JSReceiver::GetLocalPropertyAttribute(Name* name) {
4295  // Check whether the name is an array index.
4296  uint32_t index = 0;
4297  if (IsJSObject() && name->AsArrayIndex(&index)) {
4298    return GetLocalElementAttribute(index);
4299  }
4300  // Named property.
4301  LookupResult lookup(GetIsolate());
4302  LocalLookup(name, &lookup, true);
4303  return GetPropertyAttributeForResult(this, &lookup, name, false);
4304}
4305
4306
4307PropertyAttributes JSObject::GetElementAttributeWithReceiver(
4308    JSReceiver* receiver, uint32_t index, bool continue_search) {
4309  Isolate* isolate = GetIsolate();
4310
4311  // Check access rights if needed.
4312  if (IsAccessCheckNeeded()) {
4313    if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
4314      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
4315      return ABSENT;
4316    }
4317  }
4318
4319  if (IsJSGlobalProxy()) {
4320    Object* proto = GetPrototype();
4321    if (proto->IsNull()) return ABSENT;
4322    ASSERT(proto->IsJSGlobalObject());
4323    return JSObject::cast(proto)->GetElementAttributeWithReceiver(
4324        receiver, index, continue_search);
4325  }
4326
4327  // Check for lookup interceptor except when bootstrapping.
4328  if (HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4329    return GetElementAttributeWithInterceptor(receiver, index, continue_search);
4330  }
4331
4332  return GetElementAttributeWithoutInterceptor(
4333      receiver, index, continue_search);
4334}
4335
4336
// Computes the attributes of an element via this object's indexed
// interceptor: the query callback is preferred; otherwise a successful
// getter call is reported as NONE; finally falls back to the
// non-interceptor element lookup.
PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
    JSReceiver* receiver, uint32_t index, bool continue_search) {
  Isolate* isolate = GetIsolate();
  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc;
  // Handlify everything the callbacks might cause to move during GC.
  HandleScope scope(isolate);
  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
  Handle<JSReceiver> hreceiver(receiver);
  Handle<JSObject> holder(this);
  PropertyCallbackArguments args(isolate, interceptor->data(), receiver, this);
  if (!interceptor->query()->IsUndefined()) {
    // Preferred path: the query callback returns the attributes directly.
    v8::IndexedPropertyQuery query =
        v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
    v8::Handle<v8::Integer> result = args.Call(query, index);
    if (!result.IsEmpty())
      return static_cast<PropertyAttributes>(result->Int32Value());
  } else if (!interceptor->getter()->IsUndefined()) {
    // No query callback: a getter that produces a value proves the element
    // exists; report it with default attributes (NONE).
    v8::IndexedPropertyGetter getter =
        v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-get-has", this, index));
    v8::Handle<v8::Value> result = args.Call(getter, index);
    if (!result.IsEmpty()) return NONE;
  }

  // Interceptor did not answer; fall back to the real element lookup
  // (use the handles — the callbacks above may have triggered GC).
  return holder->GetElementAttributeWithoutInterceptor(
      *hreceiver, index, continue_search);
}
4368
4369
4370PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
4371      JSReceiver* receiver, uint32_t index, bool continue_search) {
4372  PropertyAttributes attr = GetElementsAccessor()->GetAttributes(
4373      receiver, this, index);
4374  if (attr != ABSENT) return attr;
4375
4376  // Handle [] on String objects.
4377  if (IsStringObjectWithCharacterAt(index)) {
4378    return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
4379  }
4380
4381  if (!continue_search) return ABSENT;
4382
4383  Object* pt = GetPrototype();
4384  if (pt->IsJSProxy()) {
4385    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4386    return JSProxy::cast(pt)->GetElementAttributeWithHandler(receiver, index);
4387  }
4388  if (pt->IsNull()) return ABSENT;
4389  return JSObject::cast(pt)->GetElementAttributeWithReceiver(
4390      receiver, index, true);
4391}
4392
4393
// Returns a shared normalized map equivalent to obj's current (fast) map,
// either from this cache or freshly created (and then cached).  May return
// a Failure if the fresh copy cannot be allocated.
MaybeObject* NormalizedMapCache::Get(JSObject* obj,
                                     PropertyNormalizationMode mode) {
  Isolate* isolate = obj->GetIsolate();
  Map* fast = obj->map();
  // Direct-mapped cache: one slot per hash bucket.
  int index = fast->Hash() % kEntries;
  Object* result = get(index);
  if (result->IsMap() &&
      Map::cast(result)->EquivalentToForNormalization(fast, mode)) {
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) {
      Map::cast(result)->SharedMapVerify();
    }
#endif
#ifdef DEBUG
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ics which can be
      // applied to the shared map.
      Object* fresh;
      MaybeObject* maybe_fresh =
          fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
      if (maybe_fresh->ToObject(&fresh)) {
        // Compare everything up to the code cache, then everything after
        // the dependent-code slot; the two skipped words may legitimately
        // differ between the cached and the fresh map.
        ASSERT(memcmp(Map::cast(fresh)->address(),
                      Map::cast(result)->address(),
                      Map::kCodeCacheOffset) == 0);
        STATIC_ASSERT(Map::kDependentCodeOffset ==
                      Map::kCodeCacheOffset + kPointerSize);
        int offset = Map::kDependentCodeOffset + kPointerSize;
        ASSERT(memcmp(Map::cast(fresh)->address() + offset,
                      Map::cast(result)->address() + offset,
                      Map::kSize - offset) == 0);
      }
    }
#endif
    return result;
  }

  // Cache miss (or non-equivalent entry): build a new shared normalized
  // map and overwrite the slot.
  { MaybeObject* maybe_result =
        fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  ASSERT(Map::cast(result)->is_dictionary_map());
  set(index, result);
  isolate->counters()->normalized_maps()->Increment();

  return result;
}
4441
4442
4443void NormalizedMapCache::Clear() {
4444  int entries = length();
4445  for (int i = 0; i != entries; i++) {
4446    set_undefined(i);
4447  }
4448}
4449
4450
4451void JSObject::UpdateMapCodeCache(Handle<JSObject> object,
4452                                  Handle<Name> name,
4453                                  Handle<Code> code) {
4454  Isolate* isolate = object->GetIsolate();
4455  CALL_HEAP_FUNCTION_VOID(isolate,
4456                          object->UpdateMapCodeCache(*name, *code));
4457}
4458
4459
4460MaybeObject* JSObject::UpdateMapCodeCache(Name* name, Code* code) {
4461  if (map()->is_shared()) {
4462    // Fast case maps are never marked as shared.
4463    ASSERT(!HasFastProperties());
4464    // Replace the map with an identical copy that can be safely modified.
4465    Object* obj;
4466    { MaybeObject* maybe_obj = map()->CopyNormalized(KEEP_INOBJECT_PROPERTIES,
4467                                                     UNIQUE_NORMALIZED_MAP);
4468      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4469    }
4470    GetIsolate()->counters()->normalized_maps()->Increment();
4471
4472    set_map(Map::cast(obj));
4473  }
4474  return map()->UpdateCodeCache(name, code);
4475}
4476
4477
4478void JSObject::NormalizeProperties(Handle<JSObject> object,
4479                                   PropertyNormalizationMode mode,
4480                                   int expected_additional_properties) {
4481  CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
4482                          object->NormalizeProperties(
4483                              mode, expected_additional_properties));
4484}
4485
4486
// Converts this object from fast (descriptor-based) properties to slow
// (dictionary-based) properties.  All allocations happen up front; only
// after every allocation has succeeded is the object itself mutated, so a
// Failure return leaves the object untouched.
MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
                                           int expected_additional_properties) {
  // Already in dictionary mode: nothing to do.
  if (!HasFastProperties()) return this;

  // The global object is always normalized.
  ASSERT(!IsGlobalObject());
  // JSGlobalProxy must never be normalized
  ASSERT(!IsJSGlobalProxy());

  Map* map_of_this = map();

  // Allocate new content.
  int real_size = map_of_this->NumberOfOwnDescriptors();
  int property_count = real_size;
  if (expected_additional_properties > 0) {
    property_count += expected_additional_properties;
  } else {
    property_count += 2;  // Make space for two more properties.
  }
  NameDictionary* dictionary;
  MaybeObject* maybe_dictionary =
      NameDictionary::Allocate(GetHeap(), property_count);
  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;

  // Copy every own descriptor into the dictionary.  The enumeration index
  // (i + 1) preserves the original property ordering.
  DescriptorArray* descs = map_of_this->instance_descriptors();
  for (int i = 0; i < real_size; i++) {
    PropertyDetails details = descs->GetDetails(i);
    switch (details.type()) {
      case CONSTANT: {
        // Constants become plain NORMAL dictionary entries.
        PropertyDetails d = PropertyDetails(
            details.attributes(), NORMAL, i + 1);
        Object* value = descs->GetConstant(i);
        MaybeObject* maybe_dictionary =
            dictionary->Add(descs->GetKey(i), value, d);
        if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
        break;
      }
      case FIELD: {
        // In-object/backing-store fields: read the current value.
        PropertyDetails d =
            PropertyDetails(details.attributes(), NORMAL, i + 1);
        Object* value = RawFastPropertyAt(descs->GetFieldIndex(i));
        MaybeObject* maybe_dictionary =
            dictionary->Add(descs->GetKey(i), value, d);
        if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
        break;
      }
      case CALLBACKS: {
        // Accessors keep their CALLBACKS type in the dictionary.
        Object* value = descs->GetCallbacksObject(i);
        PropertyDetails d = PropertyDetails(
            details.attributes(), CALLBACKS, i + 1);
        MaybeObject* maybe_dictionary =
            dictionary->Add(descs->GetKey(i), value, d);
        if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
        break;
      }
      case INTERCEPTOR:
        break;
      case HANDLER:
      case NORMAL:
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
        break;
    }
  }

  Heap* current_heap = GetHeap();

  // Copy the next enumeration index from instance descriptor.
  dictionary->SetNextEnumerationIndex(real_size + 1);

  Map* new_map;
  MaybeObject* maybe_map =
      current_heap->isolate()->context()->native_context()->
      normalized_map_cache()->Get(this, mode);
  if (!maybe_map->To(&new_map)) return maybe_map;
  ASSERT(new_map->is_dictionary_map());

  // We have now successfully allocated all the necessary objects.
  // Changes can now be made with the guarantee that all of them take effect.

  // Resize the object in the heap if necessary.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = map_of_this->instance_size() - new_instance_size;
  ASSERT(instance_size_delta >= 0);
  // Fill the freed tail of the object so the heap stays iterable.
  current_heap->CreateFillerObjectAt(this->address() + new_instance_size,
                                     instance_size_delta);
  if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
    // The object shrank while black-marked: adjust the live-byte count.
    MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
                                               -instance_size_delta);
  }

  set_map(new_map);
  map_of_this->NotifyLeafMapLayoutChange();

  set_properties(dictionary);

  current_heap->isolate()->counters()->props_to_dictionary()->Increment();

#ifdef DEBUG
  if (FLAG_trace_normalization) {
    PrintF("Object properties have been normalized:\n");
    Print();
  }
#endif
  return this;
}
4594
4595
4596void JSObject::TransformToFastProperties(Handle<JSObject> object,
4597                                         int unused_property_fields) {
4598  CALL_HEAP_FUNCTION_VOID(
4599      object->GetIsolate(),
4600      object->TransformToFastProperties(unused_property_fields));
4601}
4602
4603
4604MaybeObject* JSObject::TransformToFastProperties(int unused_property_fields) {
4605  if (HasFastProperties()) return this;
4606  ASSERT(!IsGlobalObject());
4607  return property_dictionary()->
4608      TransformPropertiesToFastFor(this, unused_property_fields);
4609}
4610
4611
4612static MUST_USE_RESULT MaybeObject* CopyFastElementsToDictionary(
4613    Isolate* isolate,
4614    FixedArrayBase* array,
4615    int length,
4616    SeededNumberDictionary* dictionary) {
4617  Heap* heap = isolate->heap();
4618  bool has_double_elements = array->IsFixedDoubleArray();
4619  for (int i = 0; i < length; i++) {
4620    Object* value = NULL;
4621    if (has_double_elements) {
4622      FixedDoubleArray* double_array = FixedDoubleArray::cast(array);
4623      if (double_array->is_the_hole(i)) {
4624        value = isolate->heap()->the_hole_value();
4625      } else {
4626        // Objects must be allocated in the old object space, since the
4627        // overall number of HeapNumbers needed for the conversion might
4628        // exceed the capacity of new space, and we would fail repeatedly
4629        // trying to convert the FixedDoubleArray.
4630        MaybeObject* maybe_value_object =
4631            heap->AllocateHeapNumber(double_array->get_scalar(i), TENURED);
4632        if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
4633      }
4634    } else {
4635      value = FixedArray::cast(array)->get(i);
4636    }
4637    if (!value->IsTheHole()) {
4638      PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4639      MaybeObject* maybe_result =
4640          dictionary->AddNumberEntry(i, value, details);
4641      if (!maybe_result->To(&dictionary)) return maybe_result;
4642    }
4643  }
4644  return dictionary;
4645}
4646
4647
4648Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4649    Handle<JSObject> object) {
4650  CALL_HEAP_FUNCTION(object->GetIsolate(),
4651                     object->NormalizeElements(),
4652                     SeededNumberDictionary);
4653}
4654
4655
// Converts this object's elements backing store to a SeededNumberDictionary
// (slow elements).  For non-strict arguments objects only the inner backing
// store (slot 1 of the parameter map) is converted.  Returns the dictionary,
// or a Failure if an allocation fails (object then left unchanged).
MaybeObject* JSObject::NormalizeElements() {
  ASSERT(!HasExternalArrayElements());

  // Find the backing store.
  FixedArrayBase* array = FixedArrayBase::cast(elements());
  Map* old_map = array->map();
  bool is_arguments =
      (old_map == old_map->GetHeap()->non_strict_arguments_elements_map());
  if (is_arguments) {
    // Arguments objects wrap their real backing store in slot 1.
    array = FixedArrayBase::cast(FixedArray::cast(array)->get(1));
  }
  // Already normalized: return the existing dictionary.
  if (array->IsDictionary()) return array;

  ASSERT(HasFastSmiOrObjectElements() ||
         HasFastDoubleElements() ||
         HasFastArgumentsElements());
  // Compute the effective length and allocate a new backing store.
  int length = IsJSArray()
      ? Smi::cast(JSArray::cast(this)->length())->value()
      : array->length();
  int old_capacity = 0;
  int used_elements = 0;
  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  SeededNumberDictionary* dictionary;
  MaybeObject* maybe_dictionary =
      SeededNumberDictionary::Allocate(GetHeap(), used_elements);
  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;

  maybe_dictionary = CopyFastElementsToDictionary(
      GetIsolate(), array, length, dictionary);
  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;

  // Switch to using the dictionary as the backing storage for elements.
  if (is_arguments) {
    // Keep the parameter map; only replace the inner backing store.
    FixedArray::cast(elements())->set(1, dictionary);
  } else {
    // Set the new map first to satisfy the elements type assert in
    // set_elements().
    Map* new_map;
    MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(),
                                                  DICTIONARY_ELEMENTS);
    if (!maybe->To(&new_map)) return maybe;
    set_map(new_map);
    set_elements(dictionary);
  }

  old_map->GetHeap()->isolate()->counters()->elements_to_dictionary()->
      Increment();

#ifdef DEBUG
  if (FLAG_trace_normalization) {
    PrintF("Object elements have been normalized:\n");
    Print();
  }
#endif

  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
  return dictionary;
}
4715
4716
4717Smi* JSReceiver::GenerateIdentityHash() {
4718  Isolate* isolate = GetIsolate();
4719
4720  int hash_value;
4721  int attempts = 0;
4722  do {
4723    // Generate a random 32-bit hash value but limit range to fit
4724    // within a smi.
4725    hash_value = V8::RandomPrivate(isolate) & Smi::kMaxValue;
4726    attempts++;
4727  } while (hash_value == 0 && attempts < 30);
4728  hash_value = hash_value != 0 ? hash_value : 1;  // never return 0
4729
4730  return Smi::FromInt(hash_value);
4731}
4732
4733
4734MaybeObject* JSObject::SetIdentityHash(Smi* hash, CreationFlag flag) {
4735  MaybeObject* maybe = SetHiddenProperty(GetHeap()->identity_hash_string(),
4736                                         hash);
4737  if (maybe->IsFailure()) return maybe;
4738  return this;
4739}
4740
4741
// Handlified accessor: fetches (creating if needed) the identity hash and
// unwraps it to an int.  Retries on allocation failure; dies on repeated
// failure (the 0 fallback is unreachable in practice).
int JSObject::GetIdentityHash(Handle<JSObject> obj) {
  CALL_AND_RETRY_OR_DIE(obj->GetIsolate(),
                        obj->GetIdentityHash(ALLOW_CREATION),
                        return Smi::cast(__object__)->value(),
                        return 0);
}
4748
4749
4750MaybeObject* JSObject::GetIdentityHash(CreationFlag flag) {
4751  Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_string());
4752  if (stored_value->IsSmi()) return stored_value;
4753
4754  // Do not generate permanent identity hash code if not requested.
4755  if (flag == OMIT_CREATION) return GetHeap()->undefined_value();
4756
4757  Smi* hash = GenerateIdentityHash();
4758  MaybeObject* result = SetHiddenProperty(GetHeap()->identity_hash_string(),
4759                                          hash);
4760  if (result->IsFailure()) return result;
4761  if (result->ToObjectUnchecked()->IsUndefined()) {
4762    // Trying to get hash of detached proxy.
4763    return Smi::FromInt(0);
4764  }
4765  return hash;
4766}
4767
4768
4769MaybeObject* JSProxy::GetIdentityHash(CreationFlag flag) {
4770  Object* hash = this->hash();
4771  if (!hash->IsSmi() && flag == ALLOW_CREATION) {
4772    hash = GenerateIdentityHash();
4773    set_hash(hash);
4774  }
4775  return hash;
4776}
4777
4778
4779Object* JSObject::GetHiddenProperty(Name* key) {
4780  ASSERT(key->IsUniqueName());
4781  if (IsJSGlobalProxy()) {
4782    // For a proxy, use the prototype as target object.
4783    Object* proxy_parent = GetPrototype();
4784    // If the proxy is detached, return undefined.
4785    if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
4786    ASSERT(proxy_parent->IsJSGlobalObject());
4787    return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
4788  }
4789  ASSERT(!IsJSGlobalProxy());
4790  MaybeObject* hidden_lookup =
4791      GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
4792  Object* inline_value = hidden_lookup->ToObjectUnchecked();
4793
4794  if (inline_value->IsSmi()) {
4795    // Handle inline-stored identity hash.
4796    if (key == GetHeap()->identity_hash_string()) {
4797      return inline_value;
4798    } else {
4799      return GetHeap()->the_hole_value();
4800    }
4801  }
4802
4803  if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4804
4805  ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4806  Object* entry = hashtable->Lookup(key);
4807  return entry;
4808}
4809
4810
4811Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> obj,
4812                                           Handle<Name> key,
4813                                           Handle<Object> value) {
4814  CALL_HEAP_FUNCTION(obj->GetIsolate(),
4815                     obj->SetHiddenProperty(*key, *value),
4816                     Object);
4817}
4818
4819
// Stores |value| under the hidden (unique) name |key|.  The identity hash
// gets a fast path: while no hash table exists it is stored inline as a
// bare Smi.  Returns |this| on success, undefined for a detached global
// proxy, or a Failure if allocation fails.
MaybeObject* JSObject::SetHiddenProperty(Name* key, Object* value) {
  ASSERT(key->IsUniqueName());
  if (IsJSGlobalProxy()) {
    // For a proxy, use the prototype as target object.
    Object* proxy_parent = GetPrototype();
    // If the proxy is detached, return undefined.
    if (proxy_parent->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proxy_parent->IsJSGlobalObject());
    return JSObject::cast(proxy_parent)->SetHiddenProperty(key, value);
  }
  ASSERT(!IsJSGlobalProxy());
  MaybeObject* hidden_lookup =
      GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
  Object* inline_value = hidden_lookup->ToObjectUnchecked();

  // If there is no backing store yet, store the identity hash inline.
  if (value->IsSmi() &&
      key == GetHeap()->identity_hash_string() &&
      (inline_value->IsUndefined() || inline_value->IsSmi())) {
    return SetHiddenPropertiesHashTable(value);
  }

  // General case: make sure a hash table exists (creating one if absent).
  hidden_lookup = GetHiddenPropertiesHashTable(CREATE_NEW_IF_ABSENT);
  ObjectHashTable* hashtable;
  if (!hidden_lookup->To(&hashtable)) return hidden_lookup;

  // If it was found, check if the key is already in the dictionary.
  MaybeObject* insert_result = hashtable->Put(key, value);
  ObjectHashTable* new_table;
  if (!insert_result->To(&new_table)) return insert_result;
  if (new_table != hashtable) {
    // If adding the key expanded the dictionary (i.e., Add returned a new
    // dictionary), store it back to the object.
    MaybeObject* store_result = SetHiddenPropertiesHashTable(new_table);
    if (store_result->IsFailure()) return store_result;
  }
  // Return this to mark success.
  return this;
}
4859
4860
// Removes the hidden property |key| if present.  Never fails: deletion is
// implemented as overwriting with the hole, which cannot grow the table
// and therefore cannot trigger GC.
void JSObject::DeleteHiddenProperty(Name* key) {
  ASSERT(key->IsUniqueName());
  if (IsJSGlobalProxy()) {
    // For a proxy, use the prototype as target object.
    Object* proxy_parent = GetPrototype();
    // If the proxy is detached, return immediately.
    if (proxy_parent->IsNull()) return;
    ASSERT(proxy_parent->IsJSGlobalObject());
    JSObject::cast(proxy_parent)->DeleteHiddenProperty(key);
    return;
  }
  ASSERT(!IsJSGlobalProxy());
  MaybeObject* hidden_lookup =
      GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
  Object* inline_value = hidden_lookup->ToObjectUnchecked();

  // We never delete (inline-stored) identity hashes.
  ASSERT(key != GetHeap()->identity_hash_string());
  // No hash table (nothing stored, or only the inline hash): nothing to do.
  if (inline_value->IsUndefined() || inline_value->IsSmi()) return;

  ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
  MaybeObject* delete_result = hashtable->Put(key, GetHeap()->the_hole_value());
  USE(delete_result);
  ASSERT(!delete_result->IsFailure());  // Delete does not cause GC.
}
4886
4887
4888bool JSObject::HasHiddenProperties() {
4889  return GetPropertyAttributePostInterceptor(this,
4890                                             GetHeap()->hidden_string(),
4891                                             false) != ABSENT;
4892}
4893
4894
// Retrieves the backing store for hidden properties, i.e. the value stored
// under the hidden string.  The inline value is one of:
//   - undefined: no hidden properties exist,
//   - a Smi: only the identity hash is stored (inline),
//   - an ObjectHashTable: the full hidden-properties table.
// With CREATE_NEW_IF_ABSENT a fresh hash table is allocated (migrating an
// inline identity hash into it) and installed on the object; with
// ONLY_RETURN_INLINE_VALUE whatever is currently stored is returned as-is.
MaybeObject* JSObject::GetHiddenPropertiesHashTable(
    InitializeHiddenProperties init_option) {
  ASSERT(!IsJSGlobalProxy());
  Object* inline_value;
  if (HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = this->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
          sorted_index < map()->NumberOfOwnDescriptors()) {
        ASSERT(descriptors->GetType(sorted_index) == FIELD);
        // FastPropertyAt returns a MaybeObject, so reading the field can
        // apparently fail (presumably it may allocate for some
        // representations) — propagate any failure.
        MaybeObject* maybe_value = this->FastPropertyAt(
            descriptors->GetDetails(sorted_index).representation(),
            descriptors->GetFieldIndex(sorted_index));
        if (!maybe_value->To(&inline_value)) return maybe_value;
      } else {
        inline_value = GetHeap()->undefined_value();
      }
    } else {
      inline_value = GetHeap()->undefined_value();
    }
  } else {
    PropertyAttributes attributes;
    // You can't install a getter on a property indexed by the hidden string,
    // so we can be sure that GetLocalPropertyPostInterceptor returns a real
    // object.
    inline_value =
        GetLocalPropertyPostInterceptor(this,
                                        GetHeap()->hidden_string(),
                                        &attributes)->ToObjectUnchecked();
  }

  // Done if the caller only wants the inline value, or if a hash table
  // already exists.
  if (init_option == ONLY_RETURN_INLINE_VALUE ||
      inline_value->IsHashTable()) {
    return inline_value;
  }

  // Allocate a small hash table to hold the hidden properties.
  ObjectHashTable* hashtable;
  static const int kInitialCapacity = 4;
  MaybeObject* maybe_obj =
      ObjectHashTable::Allocate(GetHeap(),
                                kInitialCapacity,
                                ObjectHashTable::USE_CUSTOM_MINIMUM_CAPACITY);
  if (!maybe_obj->To<ObjectHashTable>(&hashtable)) return maybe_obj;

  if (inline_value->IsSmi()) {
    // We were storing the identity hash inline and now allocated an actual
    // dictionary.  Put the identity hash into the new dictionary.
    MaybeObject* insert_result =
        hashtable->Put(GetHeap()->identity_hash_string(), inline_value);
    ObjectHashTable* new_table;
    if (!insert_result->To(&new_table)) return insert_result;
    // We expect no resizing for the first insert.
    ASSERT_EQ(hashtable, new_table);
  }

  // Install the new table on the object under the hidden string, forcing
  // field storage and skipping the extensibility check.
  MaybeObject* store_result =
      SetPropertyPostInterceptor(GetHeap()->hidden_string(),
                                 hashtable,
                                 DONT_ENUM,
                                 kNonStrictMode,
                                 OMIT_EXTENSIBILITY_CHECK,
                                 FORCE_FIELD);
  if (store_result->IsFailure()) return store_result;
  return hashtable;
}
4965
4966
// Stores |value| as the backing store for hidden properties, i.e. under the
// hidden string.  |value| is either a Smi (the inline identity hash, valid
// only while no hidden properties exist yet) or an ObjectHashTable.
// Returns this on success, or a failure from the property store.
MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) {
  ASSERT(!IsJSGlobalProxy());
  // We can store the identity hash inline iff there is no backing store
  // for hidden properties yet.
  ASSERT(HasHiddenProperties() != value->IsSmi());
  if (HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = this->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
          sorted_index < map()->NumberOfOwnDescriptors()) {
        ASSERT(descriptors->GetType(sorted_index) == FIELD);
        // Fast path: the hidden-string field already exists, overwrite it
        // in place.
        FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), value);
        return this;
      }
    }
  }
  // Slow path: add (or update) the hidden-string property, forcing field
  // storage and skipping the extensibility check.
  MaybeObject* store_result =
      SetPropertyPostInterceptor(GetHeap()->hidden_string(),
                                 value,
                                 DONT_ENUM,
                                 kNonStrictMode,
                                 OMIT_EXTENSIBILITY_CHECK,
                                 FORCE_FIELD);
  if (store_result->IsFailure()) return store_result;
  return this;
}
4998
4999
5000Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
5001                                                       Handle<Name> name,
5002                                                       DeleteMode mode) {
5003  // Check local property, ignore interceptor.
5004  Isolate* isolate = object->GetIsolate();
5005  LookupResult result(isolate);
5006  object->LocalLookupRealNamedProperty(*name, &result);
5007  if (!result.IsFound()) return isolate->factory()->true_value();
5008
5009  // Normalize object if needed.
5010  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5011
5012  return DeleteNormalizedProperty(object, name, mode);
5013}
5014
5015
// Deletes |name| from |object| by first consulting the named interceptor's
// deleter callback.  If the callback produces a result, that boolean is
// returned; otherwise deletion falls through to the regular
// post-interceptor path.
Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object,
                                                       Handle<Name> name) {
  Isolate* isolate = object->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return isolate->factory()->false_value();

  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
  if (!interceptor->deleter()->IsUndefined()) {
    v8::NamedPropertyDeleter deleter =
        v8::ToCData<v8::NamedPropertyDeleter>(interceptor->deleter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
    PropertyCallbackArguments args(
        isolate, interceptor->data(), *object, *object);
    v8::Handle<v8::Boolean> result =
        args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
    // The callback may have scheduled an exception; propagate it.
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!result.IsEmpty()) {
      ASSERT(result->IsBoolean());
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox CustomArguments::kReturnValueOffset before returning.
      return handle(*result_internal, isolate);
    }
  }
  // No interceptor deleter, or it declined to handle the deletion: delete
  // the real property instead.
  Handle<Object> result =
      DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return result;
}
5047
5048
// Deletes the element at |index| by first consulting the indexed
// interceptor's deleter callback, falling back to the elements accessor
// when the callback does not produce a result.
MaybeObject* JSObject::DeleteElementWithInterceptor(uint32_t index) {
  Isolate* isolate = GetIsolate();
  Heap* heap = isolate->heap();
  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc;
  HandleScope scope(isolate);
  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
  // No deleter installed: report "not deleted".
  if (interceptor->deleter()->IsUndefined()) return heap->false_value();
  v8::IndexedPropertyDeleter deleter =
      v8::ToCData<v8::IndexedPropertyDeleter>(interceptor->deleter());
  // Protect this object across the callback, which may allocate.
  Handle<JSObject> this_handle(this);
  LOG(isolate,
      ApiIndexedPropertyAccess("interceptor-indexed-delete", this, index));
  PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
  v8::Handle<v8::Boolean> result = args.Call(deleter, index);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  if (!result.IsEmpty()) {
    // The interceptor handled the deletion; return its boolean result.
    ASSERT(result->IsBoolean());
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    return *result_internal;
  }
  // Not intercepted: delete through the elements accessor.  Note the use of
  // this_handle rather than the raw this pointer after the callback.
  MaybeObject* raw_result = this_handle->GetElementsAccessor()->Delete(
      *this_handle,
      index,
      NORMAL_DELETION);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return raw_result;
}
5079
5080
// Handle-based wrapper around the raw DeleteElement below
// (see CALL_HEAP_FUNCTION).
Handle<Object> JSObject::DeleteElement(Handle<JSObject> obj,
                                       uint32_t index,
                                       DeleteMode mode) {
  CALL_HEAP_FUNCTION(obj->GetIsolate(),
                     obj->DeleteElement(index, mode),
                     Object);
}
5088
5089
// Implements element deletion.  Handles access checks, string wrappers
// (whose characters cannot be deleted), global proxies, indexed
// interceptors, and Object.observe change records.
MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
  Isolate* isolate = GetIsolate();
  // Check access rights if needed.
  if (IsAccessCheckNeeded() &&
      !isolate->MayIndexedAccess(this, index, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return isolate->heap()->false_value();
  }

  // Characters of a String object behave like non-configurable properties.
  if (IsStringObjectWithCharacterAt(index)) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      HandleScope scope(isolate);
      Handle<Object> holder(this, isolate);
      Handle<Object> name = isolate->factory()->NewNumberFromUint(index);
      Handle<Object> args[2] = { name, holder };
      Handle<Object> error =
          isolate->factory()->NewTypeError("strict_delete_property",
                                           HandleVector(args, 2));
      return isolate->Throw(*error);
    }
    return isolate->heap()->false_value();
  }

  if (IsJSGlobalProxy()) {
    // Forward to the proxy's prototype (the global object); a detached
    // proxy (null prototype) cannot delete anything.
    Object* proto = GetPrototype();
    if (proto->IsNull()) return isolate->heap()->false_value();
    ASSERT(proto->IsJSGlobalObject());
    return JSGlobalObject::cast(proto)->DeleteElement(index, mode);
  }

  // From this point on everything needs to be handlified.
  HandleScope scope(isolate);
  Handle<JSObject> self(this);

  // Capture the old value for observers before the element disappears.
  Handle<Object> old_value;
  bool should_enqueue_change_record = false;
  if (FLAG_harmony_observation && self->map()->is_observed()) {
    should_enqueue_change_record = self->HasLocalElement(index);
    if (should_enqueue_change_record) {
      // Accessor elements are reported with the hole as the old value.
      old_value = self->GetLocalElementAccessorPair(index) != NULL
          ? Handle<Object>::cast(isolate->factory()->the_hole_value())
          : Object::GetElement(self, index);
    }
  }

  MaybeObject* result;
  // Skip interceptor if forcing deletion.
  if (self->HasIndexedInterceptor() && mode != FORCE_DELETION) {
    result = self->DeleteElementWithInterceptor(index);
  } else {
    result = self->GetElementsAccessor()->Delete(*self, index, mode);
  }

  Handle<Object> hresult;
  if (!result->ToHandle(&hresult, isolate)) return result;

  // Only report "deleted" if the element is actually gone afterwards.
  if (should_enqueue_change_record && !self->HasLocalElement(index)) {
    Handle<String> name = isolate->factory()->Uint32ToString(index);
    EnqueueChangeRecord(self, "deleted", name, old_value);
  }

  return *hresult;
}
5155
5156
// Implements named-property deletion (ECMA-262, 3rd, 8.6.2.5).  Handles
// access checks, global proxies, array-index names (delegated to
// DeleteElement), interceptors, and Object.observe change records.
Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
                                        Handle<Name> name,
                                        DeleteMode mode) {
  Isolate* isolate = object->GetIsolate();
  // ECMA-262, 3rd, 8.6.2.5
  ASSERT(name->IsName());

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object, *name, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_DELETE);
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Forward to the proxy's prototype (the global object); a detached
    // proxy (null prototype) cannot delete anything.
    Object* proto = object->GetPrototype();
    if (proto->IsNull()) return isolate->factory()->false_value();
    ASSERT(proto->IsJSGlobalObject());
    return JSGlobalObject::DeleteProperty(
        handle(JSGlobalObject::cast(proto)), name, mode);
  }

  // Names that parse as array indices are element deletions.
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    return DeleteElement(object, index, mode);
  }

  LookupResult lookup(isolate);
  object->LocalLookup(*name, &lookup, true);
  // Deleting an absent property succeeds trivially.
  if (!lookup.IsFound()) return isolate->factory()->true_value();
  // Ignore attributes if forcing a deletion.
  if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      Handle<Object> args[2] = { name, object };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    }
    return isolate->factory()->false_value();
  }

  // Capture the old value for observers before the property disappears.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = FLAG_harmony_observation && object->map()->is_observed();
  if (is_observed && lookup.IsDataProperty()) {
    old_value = Object::GetProperty(object, name);
  }
  Handle<Object> result;

  // Check for interceptor.
  if (lookup.IsInterceptor()) {
    // Skip interceptor if forcing a deletion.
    if (mode == FORCE_DELETION) {
      result = DeletePropertyPostInterceptor(object, name, mode);
    } else {
      result = DeletePropertyWithInterceptor(object, name);
    }
  } else {
    // Normalize object if needed.
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
    // Make sure the properties are normalized before removing the entry.
    result = DeleteNormalizedProperty(object, name, mode);
  }

  // Only report "deleted" if the property is actually gone afterwards.
  if (is_observed && !object->HasLocalProperty(*name)) {
    EnqueueChangeRecord(object, "deleted", name, old_value);
  }

  return result;
}
5229
5230
5231Handle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5232                                         uint32_t index,
5233                                         DeleteMode mode) {
5234  if (object->IsJSProxy()) {
5235    return JSProxy::DeleteElementWithHandler(
5236        Handle<JSProxy>::cast(object), index, mode);
5237  }
5238  return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
5239}
5240
5241
5242Handle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5243                                          Handle<Name> name,
5244                                          DeleteMode mode) {
5245  if (object->IsJSProxy()) {
5246    return JSProxy::DeletePropertyWithHandler(
5247        Handle<JSProxy>::cast(object), name, mode);
5248  }
5249  return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
5250}
5251
5252
5253bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5254                                            ElementsKind kind,
5255                                            Object* object) {
5256  ASSERT(IsFastObjectElementsKind(kind) ||
5257         kind == DICTIONARY_ELEMENTS);
5258  if (IsFastObjectElementsKind(kind)) {
5259    int length = IsJSArray()
5260        ? Smi::cast(JSArray::cast(this)->length())->value()
5261        : elements->length();
5262    for (int i = 0; i < length; ++i) {
5263      Object* element = elements->get(i);
5264      if (!element->IsTheHole() && element == object) return true;
5265    }
5266  } else {
5267    Object* key =
5268        SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5269    if (!key->IsUndefined()) return true;
5270  }
5271  return false;
5272}
5273
5274
5275// Check whether this object references another object.
5276bool JSObject::ReferencesObject(Object* obj) {
5277  Map* map_of_this = map();
5278  Heap* heap = GetHeap();
5279  DisallowHeapAllocation no_allocation;
5280
5281  // Is the object the constructor for this object?
5282  if (map_of_this->constructor() == obj) {
5283    return true;
5284  }
5285
5286  // Is the object the prototype for this object?
5287  if (map_of_this->prototype() == obj) {
5288    return true;
5289  }
5290
5291  // Check if the object is among the named properties.
5292  Object* key = SlowReverseLookup(obj);
5293  if (!key->IsUndefined()) {
5294    return true;
5295  }
5296
5297  // Check if the object is among the indexed properties.
5298  ElementsKind kind = GetElementsKind();
5299  switch (kind) {
5300    case EXTERNAL_PIXEL_ELEMENTS:
5301    case EXTERNAL_BYTE_ELEMENTS:
5302    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
5303    case EXTERNAL_SHORT_ELEMENTS:
5304    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
5305    case EXTERNAL_INT_ELEMENTS:
5306    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
5307    case EXTERNAL_FLOAT_ELEMENTS:
5308    case EXTERNAL_DOUBLE_ELEMENTS:
5309    case FAST_DOUBLE_ELEMENTS:
5310    case FAST_HOLEY_DOUBLE_ELEMENTS:
5311      // Raw pixels and external arrays do not reference other
5312      // objects.
5313      break;
5314    case FAST_SMI_ELEMENTS:
5315    case FAST_HOLEY_SMI_ELEMENTS:
5316      break;
5317    case FAST_ELEMENTS:
5318    case FAST_HOLEY_ELEMENTS:
5319    case DICTIONARY_ELEMENTS: {
5320      FixedArray* elements = FixedArray::cast(this->elements());
5321      if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5322      break;
5323    }
5324    case NON_STRICT_ARGUMENTS_ELEMENTS: {
5325      FixedArray* parameter_map = FixedArray::cast(elements());
5326      // Check the mapped parameters.
5327      int length = parameter_map->length();
5328      for (int i = 2; i < length; ++i) {
5329        Object* value = parameter_map->get(i);
5330        if (!value->IsTheHole() && value == obj) return true;
5331      }
5332      // Check the arguments.
5333      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5334      kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5335          FAST_HOLEY_ELEMENTS;
5336      if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5337      break;
5338    }
5339  }
5340
5341  // For functions check the context.
5342  if (IsJSFunction()) {
5343    // Get the constructor function for arguments array.
5344    JSObject* arguments_boilerplate =
5345        heap->isolate()->context()->native_context()->
5346            arguments_boilerplate();
5347    JSFunction* arguments_function =
5348        JSFunction::cast(arguments_boilerplate->map()->constructor());
5349
5350    // Get the context and don't check if it is the native context.
5351    JSFunction* f = JSFunction::cast(this);
5352    Context* context = f->context();
5353    if (context->IsNativeContext()) {
5354      return false;
5355    }
5356
5357    // Check the non-special context slots.
5358    for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5359      // Only check JS objects.
5360      if (context->get(i)->IsJSObject()) {
5361        JSObject* ctxobj = JSObject::cast(context->get(i));
5362        // If it is an arguments array check the content.
5363        if (ctxobj->map()->constructor() == arguments_function) {
5364          if (ctxobj->ReferencesObject(obj)) {
5365            return true;
5366          }
5367        } else if (ctxobj == obj) {
5368          return true;
5369        }
5370      }
5371    }
5372
5373    // Check the context extension (if any) if it can have references.
5374    if (context->has_extension() && !context->IsCatchContext()) {
5375      return JSObject::cast(context->extension())->ReferencesObject(obj);
5376    }
5377  }
5378
5379  // No references to object.
5380  return false;
5381}
5382
5383
// Handle-based wrapper around the raw PreventExtensions below
// (see CALL_HEAP_FUNCTION).
Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
  CALL_HEAP_FUNCTION(object->GetIsolate(), object->PreventExtensions(), Object);
}
5387
5388
// Implements Object.preventExtensions: normalizes elements (marking them
// slow forever) and installs a copy of the map with is_extensible cleared.
// Objects with external array elements cannot be made non-extensible.
MaybeObject* JSObject::PreventExtensions() {
  Isolate* isolate = GetIsolate();
  // Check access rights if needed.
  if (IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(this,
                               isolate->heap()->undefined_value(),
                               v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(this, v8::ACCESS_KEYS);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return isolate->heap()->false_value();
  }

  if (IsJSGlobalProxy()) {
    // Forward to the proxy's prototype (the global object).
    Object* proto = GetPrototype();
    if (proto->IsNull()) return this;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->PreventExtensions();
  }

  // It's not possible to seal objects with external array elements
  if (HasExternalArrayElements()) {
    HandleScope scope(isolate);
    Handle<Object> object(this, isolate);
    Handle<Object> error  =
        isolate->factory()->NewTypeError(
            "cant_prevent_ext_external_array_elements",
            HandleVector(&object, 1));
    return isolate->Throw(*error);
  }

  // If there are fast elements we normalize.
  SeededNumberDictionary* dictionary = NULL;
  { MaybeObject* maybe = NormalizeElements();
    if (!maybe->To<SeededNumberDictionary>(&dictionary)) return maybe;
  }
  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
  // Make sure that we never go back to fast case.
  dictionary->set_requires_slow_elements();

  // Do a map transition, other objects with this map may still
  // be extensible.
  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
  Map* new_map;
  MaybeObject* maybe = map()->Copy();
  if (!maybe->To(&new_map)) return maybe;

  new_map->set_is_extensible(false);
  set_map(new_map);
  ASSERT(!map()->is_extensible());
  return new_map;
}
5439
5440
5441template<typename Dictionary>
5442static void FreezeDictionary(Dictionary* dictionary) {
5443  int capacity = dictionary->Capacity();
5444  for (int i = 0; i < capacity; i++) {
5445    Object* k = dictionary->KeyAt(i);
5446    if (dictionary->IsKey(k)) {
5447      PropertyDetails details = dictionary->DetailsAt(i);
5448      int attrs = DONT_DELETE;
5449      // READ_ONLY is an invalid attribute for JS setters/getters.
5450      if (details.type() != CALLBACKS ||
5451          !dictionary->ValueAt(i)->IsAccessorPair()) {
5452        attrs |= READ_ONLY;
5453      }
5454      details = details.CopyAddAttributes(
5455          static_cast<PropertyAttributes>(attrs));
5456      dictionary->DetailsAtPut(i, details);
5457    }
5458  }
5459}
5460
5461
// Implements Object.freeze: makes the object non-extensible, marks every
// own property DONT_DELETE (and READ_ONLY unless it is a JS accessor pair),
// and switches elements to a frozen slow (dictionary) backing store.
// Reuses or installs a map transition keyed on the frozen symbol when
// possible so repeatedly frozen objects can share maps.
MUST_USE_RESULT MaybeObject* JSObject::Freeze(Isolate* isolate) {
  // Freezing non-strict arguments should be handled elsewhere.
  ASSERT(!HasNonStrictArgumentsElements());

  Heap* heap = isolate->heap();

  // Already frozen: nothing to do.
  if (map()->is_frozen()) return this;

  // Check access rights if needed.
  if (IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(this,
                               heap->undefined_value(),
                               v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(this, v8::ACCESS_KEYS);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return heap->false_value();
  }

  if (IsJSGlobalProxy()) {
    // Forward to the proxy's prototype (the global object).
    Object* proto = GetPrototype();
    if (proto->IsNull()) return this;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->Freeze(isolate);
  }

  // It's not possible to freeze objects with external array elements
  if (HasExternalArrayElements()) {
    HandleScope scope(isolate);
    Handle<Object> object(this, isolate);
    Handle<Object> error  =
        isolate->factory()->NewTypeError(
            "cant_prevent_ext_external_array_elements",
            HandleVector(&object, 1));
    return isolate->Throw(*error);
  }

  // Prepare a dictionary backing store for the elements if they are not
  // already in dictionary mode.  The store is swapped in only after the map
  // change below succeeds.
  SeededNumberDictionary* new_element_dictionary = NULL;
  if (!elements()->IsDictionary()) {
    int length = IsJSArray()
        ? Smi::cast(JSArray::cast(this)->length())->value()
        : elements()->length();
    if (length > 0) {
      int capacity = 0;
      int used = 0;
      GetElementsCapacityAndUsage(&capacity, &used);
      MaybeObject* maybe_dict = SeededNumberDictionary::Allocate(heap, used);
      if (!maybe_dict->To(&new_element_dictionary)) return maybe_dict;

      // Move elements to a dictionary; avoid calling NormalizeElements to avoid
      // unnecessary transitions.
      maybe_dict = CopyFastElementsToDictionary(isolate, elements(), length,
                                                new_element_dictionary);
      if (!maybe_dict->To(&new_element_dictionary)) return maybe_dict;
    } else {
      // No existing elements, use a pre-allocated empty backing store
      new_element_dictionary = heap->empty_slow_element_dictionary();
    }
  }

  // Look for an existing transition to a frozen map.
  LookupResult result(isolate);
  map()->LookupTransition(this, heap->frozen_symbol(), &result);
  if (result.IsTransition()) {
    Map* transition_map = result.GetTransitionTarget();
    ASSERT(transition_map->has_dictionary_elements());
    ASSERT(transition_map->is_frozen());
    ASSERT(!transition_map->is_extensible());
    set_map(transition_map);
  } else if (HasFastProperties() && map()->CanHaveMoreTransitions()) {
    // Create a new descriptor array with fully-frozen properties
    int num_descriptors = map()->NumberOfOwnDescriptors();
    DescriptorArray* new_descriptors;
    MaybeObject* maybe_descriptors =
        map()->instance_descriptors()->CopyUpToAddAttributes(num_descriptors,
                                                             FROZEN);
    if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

    Map* new_map;
    MaybeObject* maybe_new_map = map()->CopyReplaceDescriptors(
        new_descriptors, INSERT_TRANSITION, heap->frozen_symbol());
    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
    new_map->freeze();
    new_map->set_is_extensible(false);
    new_map->set_elements_kind(DICTIONARY_ELEMENTS);
    set_map(new_map);
  } else {
    // Slow path: need to normalize properties for safety
    MaybeObject* maybe = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
    if (maybe->IsFailure()) return maybe;

    // Create a new map, since other objects with this map may be extensible.
    // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
    Map* new_map;
    MaybeObject* maybe_copy = map()->Copy();
    if (!maybe_copy->To(&new_map)) return maybe_copy;
    new_map->freeze();
    new_map->set_is_extensible(false);
    new_map->set_elements_kind(DICTIONARY_ELEMENTS);
    set_map(new_map);

    // Freeze dictionary-mode properties
    FreezeDictionary(property_dictionary());
  }

  ASSERT(map()->has_dictionary_elements());
  if (new_element_dictionary != NULL) {
    set_elements(new_element_dictionary);
  }

  if (elements() != heap->empty_slow_element_dictionary()) {
    SeededNumberDictionary* dictionary = element_dictionary();
    // Make sure we never go back to the fast case
    dictionary->set_requires_slow_elements();
    // Freeze all elements in the dictionary
    FreezeDictionary(dictionary);
  }

  return this;
}
5579
5580
// Marks this object as observed (Object.observe): switches elements to
// dictionary mode so element stores don't skip map checks, then installs an
// observed map, reusing an existing transition when one is available.
// Returns undefined on success.
MUST_USE_RESULT MaybeObject* JSObject::SetObserved(Isolate* isolate) {
  // Already observed: nothing to do.
  if (map()->is_observed())
    return isolate->heap()->undefined_value();

  Heap* heap = isolate->heap();

  if (!HasExternalArrayElements()) {
    // Go to dictionary mode, so that we don't skip map checks.
    MaybeObject* maybe = NormalizeElements();
    if (maybe->IsFailure()) return maybe;
    ASSERT(!HasFastElements());
  }

  // Look for an existing transition to an observed map.
  LookupResult result(isolate);
  map()->LookupTransition(this, heap->observed_symbol(), &result);

  Map* new_map;
  if (result.IsTransition()) {
    new_map = result.GetTransitionTarget();
    ASSERT(new_map->is_observed());
  } else if (map()->CanHaveMoreTransitions()) {
    // Copy the map and record the transition for reuse.
    MaybeObject* maybe_new_map = map()->CopyForObserved();
    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
  } else {
    // No room for another transition: plain copy, marked observed.
    MaybeObject* maybe_copy = map()->Copy();
    if (!maybe_copy->To(&new_map)) return maybe_copy;
    new_map->set_is_observed(true);
  }
  set_map(new_map);

  return heap->undefined_value();
}
5613
5614
5615MUST_USE_RESULT MaybeObject* JSObject::DeepCopy(Isolate* isolate) {
5616  StackLimitCheck check(isolate);
5617  if (check.HasOverflowed()) return isolate->StackOverflow();
5618
5619  if (map()->is_deprecated()) {
5620    MaybeObject* maybe_failure = MigrateInstance();
5621    if (maybe_failure->IsFailure()) return maybe_failure;
5622  }
5623
5624  Heap* heap = isolate->heap();
5625  Object* result;
5626  { MaybeObject* maybe_result = heap->CopyJSObject(this);
5627    if (!maybe_result->ToObject(&result)) return maybe_result;
5628  }
5629  JSObject* copy = JSObject::cast(result);
5630
5631  // Deep copy local properties.
5632  if (copy->HasFastProperties()) {
5633    DescriptorArray* descriptors = copy->map()->instance_descriptors();
5634    int limit = copy->map()->NumberOfOwnDescriptors();
5635    for (int i = 0; i < limit; i++) {
5636      PropertyDetails details = descriptors->GetDetails(i);
5637      if (details.type() != FIELD) continue;
5638      int index = descriptors->GetFieldIndex(i);
5639      Object* value = RawFastPropertyAt(index);
5640      if (value->IsJSObject()) {
5641        JSObject* js_object = JSObject::cast(value);
5642        MaybeObject* maybe_copy = js_object->DeepCopy(isolate);
5643        if (!maybe_copy->To(&value)) return maybe_copy;
5644      } else {
5645        Representation representation = details.representation();
5646        MaybeObject* maybe_storage =
5647            value->AllocateNewStorageFor(heap, representation);
5648        if (!maybe_storage->To(&value)) return maybe_storage;
5649      }
5650      copy->FastPropertyAtPut(index, value);
5651    }
5652  } else {
5653    { MaybeObject* maybe_result =
5654          heap->AllocateFixedArray(copy->NumberOfLocalProperties());
5655      if (!maybe_result->ToObject(&result)) return maybe_result;
5656    }
5657    FixedArray* names = FixedArray::cast(result);
5658    copy->GetLocalPropertyNames(names, 0);
5659    for (int i = 0; i < names->length(); i++) {
5660      ASSERT(names->get(i)->IsString());
5661      String* key_string = String::cast(names->get(i));
5662      PropertyAttributes attributes =
5663          copy->GetLocalPropertyAttribute(key_string);
5664      // Only deep copy fields from the object literal expression.
5665      // In particular, don't try to copy the length attribute of
5666      // an array.
5667      if (attributes != NONE) continue;
5668      Object* value =
5669          copy->GetProperty(key_string, &attributes)->ToObjectUnchecked();
5670      if (value->IsJSObject()) {
5671        JSObject* js_object = JSObject::cast(value);
5672        { MaybeObject* maybe_result = js_object->DeepCopy(isolate);
5673          if (!maybe_result->ToObject(&result)) return maybe_result;
5674        }
5675        { MaybeObject* maybe_result =
5676              // Creating object copy for literals. No strict mode needed.
5677              copy->SetProperty(key_string, result, NONE, kNonStrictMode);
5678          if (!maybe_result->ToObject(&result)) return maybe_result;
5679        }
5680      }
5681    }
5682  }
5683
5684  // Deep copy local elements.
5685  // Pixel elements cannot be created using an object literal.
5686  ASSERT(!copy->HasExternalArrayElements());
5687  switch (copy->GetElementsKind()) {
5688    case FAST_SMI_ELEMENTS:
5689    case FAST_ELEMENTS:
5690    case FAST_HOLEY_SMI_ELEMENTS:
5691    case FAST_HOLEY_ELEMENTS: {
5692      FixedArray* elements = FixedArray::cast(copy->elements());
5693      if (elements->map() == heap->fixed_cow_array_map()) {
5694        isolate->counters()->cow_arrays_created_runtime()->Increment();
5695#ifdef DEBUG
5696        for (int i = 0; i < elements->length(); i++) {
5697          ASSERT(!elements->get(i)->IsJSObject());
5698        }
5699#endif
5700      } else {
5701        for (int i = 0; i < elements->length(); i++) {
5702          Object* value = elements->get(i);
5703          ASSERT(value->IsSmi() ||
5704                 value->IsTheHole() ||
5705                 (IsFastObjectElementsKind(copy->GetElementsKind())));
5706          if (value->IsJSObject()) {
5707            JSObject* js_object = JSObject::cast(value);
5708            { MaybeObject* maybe_result = js_object->DeepCopy(isolate);
5709              if (!maybe_result->ToObject(&result)) return maybe_result;
5710            }
5711            elements->set(i, result);
5712          }
5713        }
5714      }
5715      break;
5716    }
5717    case DICTIONARY_ELEMENTS: {
5718      SeededNumberDictionary* element_dictionary = copy->element_dictionary();
5719      int capacity = element_dictionary->Capacity();
5720      for (int i = 0; i < capacity; i++) {
5721        Object* k = element_dictionary->KeyAt(i);
5722        if (element_dictionary->IsKey(k)) {
5723          Object* value = element_dictionary->ValueAt(i);
5724          if (value->IsJSObject()) {
5725            JSObject* js_object = JSObject::cast(value);
5726            { MaybeObject* maybe_result = js_object->DeepCopy(isolate);
5727              if (!maybe_result->ToObject(&result)) return maybe_result;
5728            }
5729            element_dictionary->ValueAtPut(i, result);
5730          }
5731        }
5732      }
5733      break;
5734    }
5735    case NON_STRICT_ARGUMENTS_ELEMENTS:
5736      UNIMPLEMENTED();
5737      break;
5738    case EXTERNAL_PIXEL_ELEMENTS:
5739    case EXTERNAL_BYTE_ELEMENTS:
5740    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
5741    case EXTERNAL_SHORT_ELEMENTS:
5742    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
5743    case EXTERNAL_INT_ELEMENTS:
5744    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
5745    case EXTERNAL_FLOAT_ELEMENTS:
5746    case EXTERNAL_DOUBLE_ELEMENTS:
5747    case FAST_DOUBLE_ELEMENTS:
5748    case FAST_HOLEY_DOUBLE_ELEMENTS:
5749      // No contained objects, nothing to do.
5750      break;
5751  }
5752  return copy;
5753}
5754
5755
5756// Tests for the fast common case for property enumeration:
5757// - This object and all prototypes has an enum cache (which means that
5758//   it is no proxy, has no interceptors and needs no access checks).
5759// - This object has no elements.
5760// - No prototype has enumerable properties/elements.
5761bool JSReceiver::IsSimpleEnum() {
5762  Heap* heap = GetHeap();
5763  for (Object* o = this;
5764       o != heap->null_value();
5765       o = JSObject::cast(o)->GetPrototype()) {
5766    if (!o->IsJSObject()) return false;
5767    JSObject* curr = JSObject::cast(o);
5768    int enum_length = curr->map()->EnumLength();
5769    if (enum_length == Map::kInvalidEnumCache) return false;
5770    ASSERT(!curr->HasNamedInterceptor());
5771    ASSERT(!curr->HasIndexedInterceptor());
5772    ASSERT(!curr->IsAccessCheckNeeded());
5773    if (curr->NumberOfEnumElements() > 0) return false;
5774    if (curr != this && enum_length != 0) return false;
5775  }
5776  return true;
5777}
5778
5779
5780int Map::NumberOfDescribedProperties(DescriptorFlag which,
5781                                     PropertyAttributes filter) {
5782  int result = 0;
5783  DescriptorArray* descs = instance_descriptors();
5784  int limit = which == ALL_DESCRIPTORS
5785      ? descs->number_of_descriptors()
5786      : NumberOfOwnDescriptors();
5787  for (int i = 0; i < limit; i++) {
5788    if ((descs->GetDetails(i).attributes() & filter) == 0 &&
5789        ((filter & SYMBOLIC) == 0 || !descs->GetKey(i)->IsSymbol())) {
5790      result++;
5791    }
5792  }
5793  return result;
5794}
5795
5796
5797int Map::NextFreePropertyIndex() {
5798  int max_index = -1;
5799  int number_of_own_descriptors = NumberOfOwnDescriptors();
5800  DescriptorArray* descs = instance_descriptors();
5801  for (int i = 0; i < number_of_own_descriptors; i++) {
5802    if (descs->GetType(i) == FIELD) {
5803      int current_index = descs->GetFieldIndex(i);
5804      if (current_index > max_index) max_index = current_index;
5805    }
5806  }
5807  return max_index + 1;
5808}
5809
5810
5811AccessorDescriptor* Map::FindAccessor(Name* name) {
5812  DescriptorArray* descs = instance_descriptors();
5813  int number_of_own_descriptors = NumberOfOwnDescriptors();
5814  for (int i = 0; i < number_of_own_descriptors; i++) {
5815    if (descs->GetType(i) == CALLBACKS && name->Equals(descs->GetKey(i))) {
5816      return descs->GetCallbacks(i);
5817    }
5818  }
5819  return NULL;
5820}
5821
5822
// Looks up |name| as an own ("local") property of this receiver, recording
// the outcome in |result|.  Handles global proxies, JS proxies, access
// checks and named interceptors before the real property lookup; when
// |search_hidden_prototypes| is set, hidden prototypes are searched as if
// they were part of this object.
void JSReceiver::LocalLookup(
    Name* name, LookupResult* result, bool search_hidden_prototypes) {
  ASSERT(name->IsName());

  Heap* heap = GetHeap();

  // A global proxy is transparent: forward the lookup to the real global
  // object behind it (or report NotFound for a detached proxy).
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return result->NotFound();
    ASSERT(proto->IsJSGlobalObject());
    return JSReceiver::cast(proto)->LocalLookup(
        name, result, search_hidden_prototypes);
  }

  // Proxies defer all property work to their handler; record that and stop.
  if (IsJSProxy()) {
    result->HandlerResult(JSProxy::cast(this));
    return;
  }

  // Do not use inline caching if the object is a non-global object
  // that requires access checks.
  if (IsAccessCheckNeeded()) {
    result->DisallowCaching();
  }

  JSObject* js_object = JSObject::cast(this);

  // Check for lookup interceptor except when bootstrapping.
  if (js_object->HasNamedInterceptor() &&
      !heap->isolate()->bootstrapper()->IsActive()) {
    result->InterceptorResult(js_object);
    return;
  }

  js_object->LocalLookupRealNamedProperty(name, result);
  if (result->IsFound() || !search_hidden_prototypes) return;

  // Nothing found locally: hidden prototypes are considered part of the
  // object itself, so continue the "local" lookup into them.
  Object* proto = js_object->GetPrototype();
  if (!proto->IsJSReceiver()) return;
  JSReceiver* receiver = JSReceiver::cast(proto);
  if (receiver->map()->is_hidden_prototype()) {
    receiver->LocalLookup(name, result, search_hidden_prototypes);
  }
}
5867
5868
5869void JSReceiver::Lookup(Name* name, LookupResult* result) {
5870  // Ecma-262 3rd 8.6.2.4
5871  Heap* heap = GetHeap();
5872  for (Object* current = this;
5873       current != heap->null_value();
5874       current = JSObject::cast(current)->GetPrototype()) {
5875    JSReceiver::cast(current)->LocalLookup(name, result, false);
5876    if (result->IsFound()) return;
5877  }
5878  result->NotFound();
5879}
5880
5881
5882// Search object and its prototype chain for callback properties.
5883void JSObject::LookupCallbackProperty(Name* name, LookupResult* result) {
5884  Heap* heap = GetHeap();
5885  for (Object* current = this;
5886       current != heap->null_value() && current->IsJSObject();
5887       current = JSObject::cast(current)->GetPrototype()) {
5888    JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
5889    if (result->IsPropertyCallbacks()) return;
5890  }
5891  result->NotFound();
5892}
5893
5894
5895// Try to update an accessor in an elements dictionary. Return true if the
5896// update succeeded, and false otherwise.
5897static bool UpdateGetterSetterInDictionary(
5898    SeededNumberDictionary* dictionary,
5899    uint32_t index,
5900    Object* getter,
5901    Object* setter,
5902    PropertyAttributes attributes) {
5903  int entry = dictionary->FindEntry(index);
5904  if (entry != SeededNumberDictionary::kNotFound) {
5905    Object* result = dictionary->ValueAt(entry);
5906    PropertyDetails details = dictionary->DetailsAt(entry);
5907    if (details.type() == CALLBACKS && result->IsAccessorPair()) {
5908      ASSERT(!details.IsDontDelete());
5909      if (details.attributes() != attributes) {
5910        dictionary->DetailsAtPut(
5911            entry,
5912            PropertyDetails(attributes, CALLBACKS, index));
5913      }
5914      AccessorPair::cast(result)->SetComponents(getter, setter);
5915      return true;
5916    }
5917  }
5918  return false;
5919}
5920
5921
// Installs a getter/setter pair for element |index| on |object|.  External
// array elements ignore accessors entirely; dictionary-backed stores (and
// the arguments backing store) are updated in place when an accessor pair
// already exists; all other cases fall through to SetElementCallback,
// which normalizes the elements.
void JSObject::DefineElementAccessor(Handle<JSObject> object,
                                     uint32_t index,
                                     Handle<Object> getter,
                                     Handle<Object> setter,
                                     PropertyAttributes attributes) {
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      // Fast elements: handled by the generic SetElementCallback path below.
      break;
    case EXTERNAL_PIXEL_ELEMENTS:
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS:
      // Ignore getters and setters on pixel and external array elements.
      return;
    case DICTIONARY_ELEMENTS:
      // An existing accessor pair can be updated without reallocation.
      if (UpdateGetterSetterInDictionary(object->element_dictionary(),
                                         index,
                                         *getter,
                                         *setter,
                                         attributes)) {
        return;
      }
      break;
    case NON_STRICT_ARGUMENTS_ELEMENTS: {
      // Ascertain whether we have read-only properties or an existing
      // getter/setter pair in an arguments elements dictionary backing
      // store.
      FixedArray* parameter_map = FixedArray::cast(object->elements());
      uint32_t length = parameter_map->length();
      // Slots 0 and 1 of the parameter map hold the context and the
      // arguments store; aliased parameters start at slot 2.
      Object* probe =
          index < (length - 2) ? parameter_map->get(index + 2) : NULL;
      if (probe == NULL || probe->IsTheHole()) {
        FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
        if (arguments->IsDictionary()) {
          SeededNumberDictionary* dictionary =
              SeededNumberDictionary::cast(arguments);
          if (UpdateGetterSetterInDictionary(dictionary,
                                             index,
                                             *getter,
                                             *setter,
                                             attributes)) {
            return;
          }
        }
      }
      break;
    }
  }

  // Generic path: allocate a fresh accessor pair and install it via the
  // (allocation-retrying) SetElementCallback.
  Isolate* isolate = object->GetIsolate();
  Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
  accessors->SetComponents(*getter, *setter);

  CALL_HEAP_FUNCTION_VOID(
      isolate, object->SetElementCallback(index, *accessors, attributes));
}
5988
5989
5990Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object,
5991                                                     Handle<Name> name) {
5992  Isolate* isolate = object->GetIsolate();
5993  LookupResult result(isolate);
5994  object->LocalLookupRealNamedProperty(*name, &result);
5995  if (result.IsPropertyCallbacks()) {
5996    // Note that the result can actually have IsDontDelete() == true when we
5997    // e.g. have to fall back to the slow case while adding a setter after
5998    // successfully reusing a map transition for a getter. Nevertheless, this is
5999    // OK, because the assertion only holds for the whole addition of both
6000    // accessors, not for the addition of each part. See first comment in
6001    // DefinePropertyAccessor below.
6002    Object* obj = result.GetCallbackObject();
6003    if (obj->IsAccessorPair()) {
6004      return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate));
6005    }
6006  }
6007  return isolate->factory()->NewAccessorPair();
6008}
6009
6010
// Installs |getter|/|setter| for the named property |name| on |object|.
// Tries the transition-based fast path first; on failure falls back to
// storing an accessor pair as a normalized CALLBACKS property.
void JSObject::DefinePropertyAccessor(Handle<JSObject> object,
                                      Handle<Name> name,
                                      Handle<Object> getter,
                                      Handle<Object> setter,
                                      PropertyAttributes attributes) {
  // We could assert that the property is configurable here, but we would need
  // to do a lookup, which seems to be a bit of overkill.
  bool only_attribute_changes = getter->IsNull() && setter->IsNull();
  if (object->HasFastProperties() && !only_attribute_changes &&
      (object->map()->NumberOfOwnDescriptors() <
       DescriptorArray::kMaxNumberOfDescriptors)) {
    // A null component means "leave that side alone".  If the getter's
    // fast-path definition failed, the setter is not attempted (the
    // !getterOk disjunct short-circuits it) and the slow path below
    // handles both components.
    bool getterOk = getter->IsNull() ||
        DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes);
    bool setterOk = !getterOk || setter->IsNull() ||
        DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes);
    if (getterOk && setterOk) return;
  }

  // Slow path: reuse/copy an existing accessor pair (or allocate a new one)
  // and store it via the normalized-property machinery.
  Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name);
  accessors->SetComponents(*getter, *setter);

  CALL_HEAP_FUNCTION_VOID(
      object->GetIsolate(),
      object->SetPropertyCallback(*name, *accessors, attributes));
}
6036
6037
6038bool JSObject::CanSetCallback(Name* name) {
6039  ASSERT(!IsAccessCheckNeeded() ||
6040         GetIsolate()->MayNamedAccess(this, name, v8::ACCESS_SET));
6041
6042  // Check if there is an API defined callback object which prohibits
6043  // callback overwriting in this object or its prototype chain.
6044  // This mechanism is needed for instance in a browser setting, where
6045  // certain accessors such as window.location should not be allowed
6046  // to be overwritten because allowing overwriting could potentially
6047  // cause security problems.
6048  LookupResult callback_result(GetIsolate());
6049  LookupCallbackProperty(name, &callback_result);
6050  if (callback_result.IsFound()) {
6051    Object* obj = callback_result.GetCallbackObject();
6052    if (obj->IsAccessorInfo() &&
6053        AccessorInfo::cast(obj)->prohibits_overwriting()) {
6054      return false;
6055    }
6056  }
6057
6058  return true;
6059}
6060
6061
// Installs |structure| (an AccessorPair or AccessorInfo) as the CALLBACKS
// entry for element |index|.  Elements are normalized to a dictionary
// first; for non-strict arguments objects the parameter alias (if any) is
// cleared so the callback cannot be bypassed.  Returns a failure on
// allocation problems (callers retry via CALL_HEAP_FUNCTION*).
MaybeObject* JSObject::SetElementCallback(uint32_t index,
                                          Object* structure,
                                          PropertyAttributes attributes) {
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);

  // Normalize elements to make this operation simple.
  SeededNumberDictionary* dictionary;
  { MaybeObject* maybe_dictionary = NormalizeElements();
    if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
  }
  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());

  // Update the dictionary with the new CALLBACKS property.
  { MaybeObject* maybe_dictionary = dictionary->Set(index, structure, details);
    if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
  }

  // Accessors force the slow element path from here on.
  dictionary->set_requires_slow_elements();
  // Update the dictionary backing store on the object.
  if (elements()->map() == GetHeap()->non_strict_arguments_elements_map()) {
    // Also delete any parameter alias.
    //
    // TODO(kmillikin): when deleting the last parameter alias we could
    // switch to a direct backing store without the parameter map.  This
    // would allow GC of the context.
    FixedArray* parameter_map = FixedArray::cast(elements());
    if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
      parameter_map->set(index + 2, GetHeap()->the_hole_value());
    }
    parameter_map->set(1, dictionary);
  } else {
    set_elements(dictionary);
  }

  return GetHeap()->undefined_value();
}
6098
6099
// Installs |structure| as the CALLBACKS entry for the named property
// |name|.  The object's properties are normalized first; for global
// objects a fresh map is allocated and dependent optimized code is
// deoptimized, because global loads embed property cells directly in code.
MaybeObject* JSObject::SetPropertyCallback(Name* name,
                                           Object* structure,
                                           PropertyAttributes attributes) {
  // Normalize object to make this operation simple.
  MaybeObject* maybe_ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
  if (maybe_ok->IsFailure()) return maybe_ok;

  // For the global object allocate a new map to invalidate the global inline
  // caches which have a global property cell reference directly in the code.
  if (IsGlobalObject()) {
    Map* new_map;
    MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
    ASSERT(new_map->is_dictionary_map());

    set_map(new_map);
    // When running crankshaft, changing the map is not enough. We
    // need to deoptimize all functions that rely on this global
    // object.
    Deoptimizer::DeoptimizeGlobalObject(this);
  }

  // Update the dictionary with the new CALLBACKS property.
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
  maybe_ok = SetNormalizedProperty(name, structure, details);
  if (maybe_ok->IsFailure()) return maybe_ok;

  return GetHeap()->undefined_value();
}
6129
6130
// Defines a getter/setter pair for |name| on |object|, dispatching to the
// element or named-property path.  Also enqueues an Object.observe change
// record ("new" or "reconfigured") when the object is observed.
void JSObject::DefineAccessor(Handle<JSObject> object,
                              Handle<Name> name,
                              Handle<Object> getter,
                              Handle<Object> setter,
                              PropertyAttributes attributes) {
  Isolate* isolate = object->GetIsolate();
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object, *name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_SET);
    return;
  }

  // Global proxies are transparent: define the accessor on the real global.
  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return;
    ASSERT(proto->IsJSGlobalObject());
    DefineAccessor(
        Handle<JSObject>::cast(proto), name, getter, setter, attributes);
    return;
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc;

  // Try to flatten before operating on the string.
  if (name->IsString()) String::cast(*name)->TryFlatten();

  // API accessors may prohibit overwriting (e.g. window.location).
  if (!object->CanSetCallback(*name)) return;

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  // For observed objects, capture the pre-existing value (only when the
  // property currently is a data property) so the change record can carry
  // the old value; accessors report the-hole instead.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = FLAG_harmony_observation && object->map()->is_observed();
  bool preexists = false;
  if (is_observed) {
    if (is_element) {
      preexists = object->HasLocalElement(index);
      if (preexists && object->GetLocalElementAccessorPair(index) == NULL) {
        old_value = Object::GetElement(object, index);
      }
    } else {
      LookupResult lookup(isolate);
      object->LocalLookup(*name, &lookup, true);
      preexists = lookup.IsProperty();
      if (preexists && lookup.IsDataProperty()) {
        old_value = Object::GetProperty(object, name);
      }
    }
  }

  if (is_element) {
    DefineElementAccessor(object, index, getter, setter, attributes);
  } else {
    DefinePropertyAccessor(object, name, getter, setter, attributes);
  }

  if (is_observed) {
    const char* type = preexists ? "reconfigured" : "new";
    EnqueueChangeRecord(object, type, name, old_value);
  }
}
6195
6196
6197static bool TryAccessorTransition(JSObject* self,
6198                                  Map* transitioned_map,
6199                                  int target_descriptor,
6200                                  AccessorComponent component,
6201                                  Object* accessor,
6202                                  PropertyAttributes attributes) {
6203  DescriptorArray* descs = transitioned_map->instance_descriptors();
6204  PropertyDetails details = descs->GetDetails(target_descriptor);
6205
6206  // If the transition target was not callbacks, fall back to the slow case.
6207  if (details.type() != CALLBACKS) return false;
6208  Object* descriptor = descs->GetCallbacksObject(target_descriptor);
6209  if (!descriptor->IsAccessorPair()) return false;
6210
6211  Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
6212  PropertyAttributes target_attributes = details.attributes();
6213
6214  // Reuse transition if adding same accessor with same attributes.
6215  if (target_accessor == accessor && target_attributes == attributes) {
6216    self->set_map(transitioned_map);
6217    return true;
6218  }
6219
6220  // If either not the same accessor, or not the same attributes, fall back to
6221  // the slow case.
6222  return false;
6223}
6224
6225
6226static MaybeObject* CopyInsertDescriptor(Map* map,
6227                                         Name* name,
6228                                         AccessorPair* accessors,
6229                                         PropertyAttributes attributes) {
6230  CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
6231  return map->CopyInsertDescriptor(&new_accessors_desc, INSERT_TRANSITION);
6232}
6233
6234
// Handle-based wrapper around CopyInsertDescriptor above: retries the raw
// version on allocation failure via CALL_HEAP_FUNCTION and returns the
// resulting map as a handle.
static Handle<Map> CopyInsertDescriptor(Handle<Map> map,
                                        Handle<Name> name,
                                        Handle<AccessorPair> accessors,
                                        PropertyAttributes attributes) {
  CALL_HEAP_FUNCTION(map->GetIsolate(),
                     CopyInsertDescriptor(*map, *name, *accessors, attributes),
                     Map);
}
6243
6244
// Attempts to define one accessor component (getter or setter) for |name|
// via map transitions, without normalizing the object.  Returns true on
// success; false means the caller must use the slow (normalized) path.
bool JSObject::DefineFastAccessor(Handle<JSObject> object,
                                  Handle<Name> name,
                                  AccessorComponent component,
                                  Handle<Object> accessor,
                                  PropertyAttributes attributes) {
  ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
  Isolate* isolate = object->GetIsolate();
  LookupResult result(isolate);
  object->LocalLookup(*name, &result);

  // An existing non-accessor property cannot be converted on the fast path.
  if (result.IsFound() && !result.IsPropertyCallbacks()) {
    return false;
  }

  // Return success if the same accessor with the same attributes already exist.
  AccessorPair* source_accessors = NULL;
  if (result.IsPropertyCallbacks()) {
    Object* callback_value = result.GetCallbackObject();
    if (callback_value->IsAccessorPair()) {
      source_accessors = AccessorPair::cast(callback_value);
      Object* entry = source_accessors->get(component);
      if (entry == *accessor && result.GetAttributes() == attributes) {
        return true;
      }
    } else {
      return false;
    }

    int descriptor_number = result.GetDescriptorIndex();

    // NOTE: |result| is reused here for a *transition* lookup; the property
    // lookup information above has already been extracted.
    object->map()->LookupTransition(*object, *name, &result);

    if (result.IsFound()) {
      Map* target = result.GetTransitionTarget();
      ASSERT(target->NumberOfOwnDescriptors() ==
             object->map()->NumberOfOwnDescriptors());
      // This works since descriptors are sorted in order of addition.
      ASSERT(object->map()->instance_descriptors()->
             GetKey(descriptor_number) == *name);
      return TryAccessorTransition(*object, target, descriptor_number,
                                   component, *accessor, attributes);
    }
  } else {
    // If not, lookup a transition.
    object->map()->LookupTransition(*object, *name, &result);

    // If there is a transition, try to follow it.
    if (result.IsFound()) {
      Map* target = result.GetTransitionTarget();
      int descriptor_number = target->LastAdded();
      ASSERT(target->instance_descriptors()->GetKey(descriptor_number)
             ->Equals(*name));
      return TryAccessorTransition(*object, target, descriptor_number,
                                   component, *accessor, attributes);
    }
  }

  // If there is no transition yet, add a transition to the a new accessor pair
  // containing the accessor.  Allocate a new pair if there were no source
  // accessors.  Otherwise, copy the pair and modify the accessor.
  Handle<AccessorPair> accessors = source_accessors != NULL
      ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
      : isolate->factory()->NewAccessorPair();
  accessors->set(component, *accessor);
  Handle<Map> new_map = CopyInsertDescriptor(Handle<Map>(object->map()),
                                             name, accessors, attributes);
  object->set_map(*new_map);
  return true;
}
6314
6315
// Installs an API-defined AccessorInfo callback for the name stored in
// |info|, on this object (or the real global behind a global proxy).
// Returns undefined when the definition is refused (failed access check,
// external-array element, non-configurable property), a failure on
// allocation problems, and |this| on success.
MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
  Isolate* isolate = GetIsolate();
  Name* name = Name::cast(info->name());
  // Check access rights if needed.
  if (IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return isolate->heap()->undefined_value();
  }

  // Global proxies are transparent; install on the real global object.
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return this;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->DefineAccessor(info);
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc;

  // Try to flatten before operating on the string.
  if (name->IsString()) String::cast(name)->TryFlatten();

  if (!CanSetCallback(name)) return isolate->heap()->undefined_value();

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  if (is_element) {
    // Array elements are not supported (length interactions).
    if (IsJSArray()) return isolate->heap()->undefined_value();

    // Accessors overwrite previous callbacks (cf. with getters/setters).
    switch (GetElementsKind()) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
        // Ignore getters and setters on pixel and external array
        // elements.
        return isolate->heap()->undefined_value();
      case DICTIONARY_ELEMENTS:
        break;
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;
    }

    MaybeObject* maybe_ok =
        SetElementCallback(index, info, info->property_attributes());
    if (maybe_ok->IsFailure()) return maybe_ok;
  } else {
    // Lookup the name.
    LookupResult result(isolate);
    LocalLookup(name, &result, true);
    // ES5 forbids turning a property into an accessor if it's not
    // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
    if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
      return isolate->heap()->undefined_value();
    }

    MaybeObject* maybe_ok =
        SetPropertyCallback(name, info, info->property_attributes());
    if (maybe_ok->IsFailure()) return maybe_ok;
  }

  return this;
}
6397
6398
6399MaybeObject* JSObject::LookupAccessor(Name* name, AccessorComponent component) {
6400  Heap* heap = GetHeap();
6401
6402  // Make sure that the top context does not change when doing callbacks or
6403  // interceptor calls.
6404  AssertNoContextChange ncc;
6405
6406  // Check access rights if needed.
6407  if (IsAccessCheckNeeded() &&
6408      !heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_HAS)) {
6409    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
6410    RETURN_IF_SCHEDULED_EXCEPTION(heap->isolate());
6411    return heap->undefined_value();
6412  }
6413
6414  // Make the lookup and include prototypes.
6415  uint32_t index = 0;
6416  if (name->AsArrayIndex(&index)) {
6417    for (Object* obj = this;
6418         obj != heap->null_value();
6419         obj = JSReceiver::cast(obj)->GetPrototype()) {
6420      if (obj->IsJSObject() && JSObject::cast(obj)->HasDictionaryElements()) {
6421        JSObject* js_object = JSObject::cast(obj);
6422        SeededNumberDictionary* dictionary = js_object->element_dictionary();
6423        int entry = dictionary->FindEntry(index);
6424        if (entry != SeededNumberDictionary::kNotFound) {
6425          Object* element = dictionary->ValueAt(entry);
6426          if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
6427              element->IsAccessorPair()) {
6428            return AccessorPair::cast(element)->GetComponent(component);
6429          }
6430        }
6431      }
6432    }
6433  } else {
6434    for (Object* obj = this;
6435         obj != heap->null_value();
6436         obj = JSReceiver::cast(obj)->GetPrototype()) {
6437      LookupResult result(heap->isolate());
6438      JSReceiver::cast(obj)->LocalLookup(name, &result);
6439      if (result.IsFound()) {
6440        if (result.IsReadOnly()) return heap->undefined_value();
6441        if (result.IsPropertyCallbacks()) {
6442          Object* obj = result.GetCallbackObject();
6443          if (obj->IsAccessorPair()) {
6444            return AccessorPair::cast(obj)->GetComponent(component);
6445          }
6446        }
6447      }
6448    }
6449  }
6450  return heap->undefined_value();
6451}
6452
6453
6454Object* JSObject::SlowReverseLookup(Object* value) {
6455  if (HasFastProperties()) {
6456    int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
6457    DescriptorArray* descs = map()->instance_descriptors();
6458    for (int i = 0; i < number_of_own_descriptors; i++) {
6459      if (descs->GetType(i) == FIELD) {
6460        Object* property = RawFastPropertyAt(descs->GetFieldIndex(i));
6461        if (FLAG_track_double_fields &&
6462            descs->GetDetails(i).representation().IsDouble()) {
6463          ASSERT(property->IsHeapNumber());
6464          if (value->IsNumber() && property->Number() == value->Number()) {
6465            return descs->GetKey(i);
6466          }
6467        } else if (property == value) {
6468          return descs->GetKey(i);
6469        }
6470      } else if (descs->GetType(i) == CONSTANT) {
6471        if (descs->GetConstant(i) == value) {
6472          return descs->GetKey(i);
6473        }
6474      }
6475    }
6476    return GetHeap()->undefined_value();
6477  } else {
6478    return property_dictionary()->SlowReverseLookup(value);
6479  }
6480}
6481
6482
// Allocates a fresh map with this map's instance type and the given
// |instance_size|, copying over prototype, constructor and both bit
// fields.  Descriptor-related state in bit_field3 is reset: the new map
// owns (zero) descriptors, has an invalid enum cache, and is flagged
// neither deprecated nor unstable.
MaybeObject* Map::RawCopy(int instance_size) {
  Map* result;
  MaybeObject* maybe_result =
      GetHeap()->AllocateMap(instance_type(), instance_size);
  if (!maybe_result->To(&result)) return maybe_result;

  result->set_prototype(prototype());
  result->set_constructor(constructor());
  result->set_bit_field(bit_field());
  result->set_bit_field2(bit_field2());
  // Rebuild bit_field3 from this map's value, clearing per-map state that
  // must not be inherited by a copy.
  int new_bit_field3 = bit_field3();
  new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
  new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
  new_bit_field3 = EnumLengthBits::update(new_bit_field3, kInvalidEnumCache);
  new_bit_field3 = Deprecated::update(new_bit_field3, false);
  new_bit_field3 = IsUnstable::update(new_bit_field3, false);
  result->set_bit_field3(new_bit_field3);
  return result;
}
6502
6503
// Copies this map for use as a normalized (dictionary-mode) map.  When
// |mode| is CLEAR_INOBJECT_PROPERTIES the in-object property slots are
// dropped and the instance size shrinks accordingly; otherwise the
// in-object property count carries over.
MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
                                 NormalizedMapSharingMode sharing) {
  int new_instance_size = instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= inobject_properties() * kPointerSize;
  }

  Map* result;
  MaybeObject* maybe_result = RawCopy(new_instance_size);
  if (!maybe_result->To(&result)) return maybe_result;

  if (mode != CLEAR_INOBJECT_PROPERTIES) {
    result->set_inobject_properties(inobject_properties());
  }

  // Shared normalized maps may be reused across objects; mark accordingly.
  result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
  result->set_dictionary_map(true);
  result->set_migration_target(false);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && result->is_shared()) {
    result->SharedMapVerify();
  }
#endif

  return result;
}
6531
6532
// Handle-based wrapper around the raw CopyDropDescriptors below; retries
// on allocation failure via CALL_HEAP_FUNCTION.
Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
  CALL_HEAP_FUNCTION(map->GetIsolate(), map->CopyDropDescriptors(), Map);
}
6536
6537
// Copies this map without any descriptors.  Property-field bookkeeping
// (in-object, unused, pre-allocated counts) carries over; the copy's code
// cache is cleared and NotifyLeafMapLayoutChange() is invoked on the
// source map.
MaybeObject* Map::CopyDropDescriptors() {
  Map* result;
  MaybeObject* maybe_result = RawCopy(instance_size());
  if (!maybe_result->To(&result)) return maybe_result;

  // Please note instance_type and instance_size are set when allocated.
  result->set_inobject_properties(inobject_properties());
  result->set_unused_property_fields(unused_property_fields());

  result->set_pre_allocated_property_fields(pre_allocated_property_fields());
  result->set_is_shared(false);
  result->ClearCodeCache(GetHeap());
  NotifyLeafMapLayoutChange();
  return result;
}
6553
6554
// Extends this map with |descriptor| while sharing (and, if necessary,
// growing) |descriptors| between the old and new map.  A SIMPLE_TRANSITION
// keyed on the descriptor's name is recorded, and ownership of the shared
// descriptor array passes to the new map.  Only valid when this map owns
// its descriptor array (see the ASSERT below).
MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors,
                                  Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  ASSERT(NumberOfOwnDescriptors() ==
         instance_descriptors()->number_of_descriptors());
  Map* result;
  MaybeObject* maybe_result = CopyDropDescriptors();
  if (!maybe_result->To(&result)) return maybe_result;

  Name* name = descriptor->GetKey();

  TransitionArray* transitions;
  MaybeObject* maybe_transitions =
      AddTransition(name, result, SIMPLE_TRANSITION);
  if (!maybe_transitions->To(&transitions)) return maybe_transitions;

  int old_size = descriptors->number_of_descriptors();

  DescriptorArray* new_descriptors;

  if (descriptors->NumberOfSlackDescriptors() > 0) {
    // There is slack left in the shared array: append in place.
    new_descriptors = descriptors;
    new_descriptors->Append(descriptor);
  } else {
    // Descriptor arrays grow by 50%.
    MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
        old_size, old_size < 4 ? 1 : old_size / 2);
    if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

    DescriptorArray::WhitenessWitness witness(new_descriptors);

    // Copy the descriptors, inserting a descriptor.
    for (int i = 0; i < old_size; ++i) {
      new_descriptors->CopyFrom(i, descriptors, i, witness);
    }

    new_descriptors->Append(descriptor, witness);

    if (old_size > 0) {
      // If the source descriptors had an enum cache we copy it. This ensures
      // that the maps to which we push the new descriptor array back can rely
      // on a cache always being available once it is set. If the map has more
      // enumerated descriptors than available in the original cache, the cache
      // will be lazily replaced by the extended cache when needed.
      if (descriptors->HasEnumCache()) {
        new_descriptors->CopyEnumCacheFrom(descriptors);
      }

      Map* map;
      // Replace descriptors by new_descriptors in all maps that share it.
      // The back-pointer chain is walked until a map that no longer points
      // at the old shared array is encountered.
      for (Object* current = GetBackPointer();
           !current->IsUndefined();
           current = map->GetBackPointer()) {
        map = Map::cast(current);
        if (map->instance_descriptors() != descriptors) break;
        map->set_instance_descriptors(new_descriptors);
      }

      set_instance_descriptors(new_descriptors);
    }
  }

  result->SetBackPointer(this);
  result->InitializeDescriptors(new_descriptors);
  ASSERT(result->NumberOfOwnDescriptors() == NumberOfOwnDescriptors() + 1);

  set_transitions(transitions);
  // Ownership of the shared descriptor array moves to the child map.
  set_owns_descriptors(false);

  return result;
}
6628
6629
// Copies this map and installs |descriptors| as the copy's descriptor
// array.  With INSERT_TRANSITION (and room for more transitions) a
// transition keyed on |name| is recorded and the copy's back pointer is
// set; otherwise the descriptors' representations are reset to Tagged,
// unless the caller passed OMIT_TRANSITION_KEEP_REPRESENTATIONS.
MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors,
                                         TransitionFlag flag,
                                         Name* name,
                                         SimpleTransitionFlag simple_flag) {
  ASSERT(descriptors->IsSortedNoDuplicates());

  Map* result;
  MaybeObject* maybe_result = CopyDropDescriptors();
  if (!maybe_result->To(&result)) return maybe_result;

  result->InitializeDescriptors(descriptors);

  if (flag == INSERT_TRANSITION && CanHaveMoreTransitions()) {
    TransitionArray* transitions;
    MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag);
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
    set_transitions(transitions);
    result->SetBackPointer(this);
  } else if (flag != OMIT_TRANSITION_KEEP_REPRESENTATIONS) {
    descriptors->InitializeRepresentations(Representation::Tagged());
  }

  return result;
}
6654
6655
6656// Since this method is used to rewrite an existing transition tree, it can
6657// always insert transitions without checking.
6658MaybeObject* Map::CopyInstallDescriptors(int new_descriptor,
6659                                         DescriptorArray* descriptors) {
6660  ASSERT(descriptors->IsSortedNoDuplicates());
6661
6662  Map* result;
6663  MaybeObject* maybe_result = CopyDropDescriptors();
6664  if (!maybe_result->To(&result)) return maybe_result;
6665
6666  result->InitializeDescriptors(descriptors);
6667  result->SetNumberOfOwnDescriptors(new_descriptor + 1);
6668
6669  int unused_property_fields = this->unused_property_fields();
6670  if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
6671    unused_property_fields = this->unused_property_fields() - 1;
6672    if (unused_property_fields < 0) {
6673      unused_property_fields += JSObject::kFieldsAdded;
6674    }
6675  }
6676
6677  result->set_unused_property_fields(unused_property_fields);
6678  result->set_owns_descriptors(false);
6679
6680  Name* name = descriptors->GetKey(new_descriptor);
6681  TransitionArray* transitions;
6682  MaybeObject* maybe_transitions =
6683      AddTransition(name, result, SIMPLE_TRANSITION);
6684  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
6685
6686  set_transitions(transitions);
6687  result->SetBackPointer(this);
6688
6689  return result;
6690}
6691
6692
// Copies this map with elements kind |kind|.  With INSERT_TRANSITION the
// copy is additionally recorded as this map's elements transition (unless
// one already exists).  When this map owns its descriptors, the cheaper
// descriptor-sharing path is taken and ownership moves to the new map.
MaybeObject* Map::CopyAsElementsKind(ElementsKind kind, TransitionFlag flag) {
  if (flag == INSERT_TRANSITION) {
    // Only dictionary/external-array transitions may overwrite an existing
    // elements transition; fast-kind transitions must be strictly more
    // general and actually change the kind.
    ASSERT(!HasElementsTransition() ||
        ((elements_transition_map()->elements_kind() == DICTIONARY_ELEMENTS ||
          IsExternalArrayElementsKind(
              elements_transition_map()->elements_kind())) &&
         (kind == DICTIONARY_ELEMENTS ||
          IsExternalArrayElementsKind(kind))));
    ASSERT(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(elements_kind(), kind));
    ASSERT(kind != elements_kind());
  }

  bool insert_transition =
      flag == INSERT_TRANSITION && !HasElementsTransition();

  if (insert_transition && owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    Map* new_map;
    MaybeObject* maybe_new_map = CopyDropDescriptors();
    if (!maybe_new_map->To(&new_map)) return maybe_new_map;

    MaybeObject* added_elements = set_elements_transition_map(new_map);
    if (added_elements->IsFailure()) return added_elements;

    new_map->set_elements_kind(kind);
    new_map->InitializeDescriptors(instance_descriptors());
    new_map->SetBackPointer(this);
    set_owns_descriptors(false);
    return new_map;
  }

  // In case the map did not own its own descriptors, a split is forced by
  // copying the map; creating a new descriptor array cell.
  // Create a new free-floating map only if we are not allowed to store it.
  Map* new_map;
  MaybeObject* maybe_new_map = Copy();
  if (!maybe_new_map->To(&new_map)) return maybe_new_map;

  new_map->set_elements_kind(kind);

  if (insert_transition) {
    MaybeObject* added_elements = set_elements_transition_map(new_map);
    if (added_elements->IsFailure()) return added_elements;
    new_map->SetBackPointer(this);
  }

  return new_map;
}
6743
6744
// Copies this (not-yet-observed) map into an observed variant, recording a
// FULL_TRANSITION keyed on the heap's observed symbol so the observed map
// can be found again from this one.
MaybeObject* Map::CopyForObserved() {
  ASSERT(!is_observed());

  // In case the map owned its own descriptors, share the descriptors and
  // transfer ownership to the new map.
  Map* new_map;
  MaybeObject* maybe_new_map;
  if (owns_descriptors()) {
    maybe_new_map = CopyDropDescriptors();
  } else {
    maybe_new_map = Copy();
  }
  if (!maybe_new_map->To(&new_map)) return maybe_new_map;

  TransitionArray* transitions;
  MaybeObject* maybe_transitions = AddTransition(GetHeap()->observed_symbol(),
                                                 new_map,
                                                 FULL_TRANSITION);
  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  set_transitions(transitions);

  new_map->set_is_observed(true);

  if (owns_descriptors()) {
    new_map->InitializeDescriptors(instance_descriptors());
    set_owns_descriptors(false);
  }

  new_map->SetBackPointer(this);
  return new_map;
}
6776
6777
// Copies this map, seeding the copy's descriptor array with the
// constructor's initial-map descriptors when the map has pre-allocated
// property fields.  Without pre-allocated fields this degenerates to
// CopyDropDescriptors().
MaybeObject* Map::CopyWithPreallocatedFieldDescriptors() {
  if (pre_allocated_property_fields() == 0) return CopyDropDescriptors();

  // If the map has pre-allocated properties always start out with a descriptor
  // array describing these properties.
  ASSERT(constructor()->IsJSFunction());
  JSFunction* ctor = JSFunction::cast(constructor());
  Map* map = ctor->initial_map();
  DescriptorArray* descriptors = map->instance_descriptors();

  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  DescriptorArray* new_descriptors;
  MaybeObject* maybe_descriptors =
      descriptors->CopyUpTo(number_of_own_descriptors);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
}
6796
6797
// Handle-based wrapper around the raw Map::Copy below; retries on
// allocation failure via CALL_HEAP_FUNCTION.
Handle<Map> Map::Copy(Handle<Map> map) {
  CALL_HEAP_FUNCTION(map->GetIsolate(), map->Copy(), Map);
}
6801
6802
// Copies this map together with a copy of its own descriptors (trimmed to
// NumberOfOwnDescriptors), without recording any transition.
MaybeObject* Map::Copy() {
  DescriptorArray* descriptors = instance_descriptors();
  DescriptorArray* new_descriptors;
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  MaybeObject* maybe_descriptors =
      descriptors->CopyUpTo(number_of_own_descriptors);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
}
6813
6814
// Returns a copy of this map extended with |descriptor|.  When a
// transition is requested and this map owns its descriptors (with room for
// another transition), the cheaper descriptor-sharing path is taken;
// otherwise a fresh descriptor array of size old_size + 1 is built.
MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor,
                                    TransitionFlag flag) {
  DescriptorArray* descriptors = instance_descriptors();

  // Ensure the key is unique.
  MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
  if (maybe_failure->IsFailure()) return maybe_failure;

  int old_size = NumberOfOwnDescriptors();
  int new_size = old_size + 1;

  if (flag == INSERT_TRANSITION &&
      owns_descriptors() &&
      CanHaveMoreTransitions()) {
    return ShareDescriptor(descriptors, descriptor);
  }

  DescriptorArray* new_descriptors;
  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(old_size, 1);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  DescriptorArray::WhitenessWitness witness(new_descriptors);

  // Copy the descriptors, inserting a descriptor.
  for (int i = 0; i < old_size; ++i) {
    new_descriptors->CopyFrom(i, descriptors, i, witness);
  }

  // If the source array held descriptors beyond this map's own, the new
  // key may not sort last, so place it explicitly and restore sorted
  // order; otherwise a plain append keeps the array sorted.
  if (old_size != descriptors->number_of_descriptors()) {
    new_descriptors->SetNumberOfDescriptors(new_size);
    new_descriptors->Set(old_size, descriptor, witness);
    new_descriptors->Sort();
  } else {
    new_descriptors->Append(descriptor, witness);
  }

  Name* key = descriptor->GetKey();
  return CopyReplaceDescriptors(new_descriptors, flag, key, SIMPLE_TRANSITION);
}
6854
6855
6856MaybeObject* Map::CopyInsertDescriptor(Descriptor* descriptor,
6857                                       TransitionFlag flag) {
6858  DescriptorArray* old_descriptors = instance_descriptors();
6859
6860  // Ensure the key is unique.
6861  MaybeObject* maybe_result = descriptor->KeyToUniqueName();
6862  if (maybe_result->IsFailure()) return maybe_result;
6863
6864  // We replace the key if it is already present.
6865  int index = old_descriptors->SearchWithCache(descriptor->GetKey(), this);
6866  if (index != DescriptorArray::kNotFound) {
6867    return CopyReplaceDescriptor(old_descriptors, descriptor, index, flag);
6868  }
6869  return CopyAddDescriptor(descriptor, flag);
6870}
6871
6872
// Copies the first |enumeration_index| descriptors of this array,
// OR-ing |attributes| into each copied descriptor's details.  READ_ONLY is
// never applied to JS accessor pairs since it is an invalid attribute for
// setters/getters.  Returns the shared empty array for an empty copy.
MaybeObject* DescriptorArray::CopyUpToAddAttributes(
    int enumeration_index, PropertyAttributes attributes) {
  if (enumeration_index == 0) return GetHeap()->empty_descriptor_array();

  int size = enumeration_index;

  DescriptorArray* descriptors;
  MaybeObject* maybe_descriptors = Allocate(size);
  if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
  DescriptorArray::WhitenessWitness witness(descriptors);

  if (attributes != NONE) {
    for (int i = 0; i < size; ++i) {
      Object* value = GetValue(i);
      PropertyDetails details = GetDetails(i);
      int mask = DONT_DELETE | DONT_ENUM;
      // READ_ONLY is an invalid attribute for JS setters/getters.
      if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
        mask |= READ_ONLY;
      }
      details = details.CopyAddAttributes(
          static_cast<PropertyAttributes>(attributes & mask));
      Descriptor desc(GetKey(i), value, details);
      descriptors->Set(i, &desc, witness);
    }
  } else {
    // No attribute changes requested: copy descriptors verbatim.
    for (int i = 0; i < size; ++i) {
      descriptors->CopyFrom(i, this, i, witness);
    }
  }

  // A partial copy may no longer be sorted; restore sort order if needed.
  if (number_of_descriptors() != enumeration_index) descriptors->Sort();

  return descriptors;
}
6908
6909
6910MaybeObject* Map::CopyReplaceDescriptor(DescriptorArray* descriptors,
6911                                        Descriptor* descriptor,
6912                                        int insertion_index,
6913                                        TransitionFlag flag) {
6914  // Ensure the key is unique.
6915  MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
6916  if (maybe_failure->IsFailure()) return maybe_failure;
6917
6918  Name* key = descriptor->GetKey();
6919  ASSERT(key == descriptors->GetKey(insertion_index));
6920
6921  int new_size = NumberOfOwnDescriptors();
6922  ASSERT(0 <= insertion_index && insertion_index < new_size);
6923
6924  ASSERT_LT(insertion_index, new_size);
6925
6926  DescriptorArray* new_descriptors;
6927  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(new_size);
6928  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
6929  DescriptorArray::WhitenessWitness witness(new_descriptors);
6930
6931  for (int i = 0; i < new_size; ++i) {
6932    if (i == insertion_index) {
6933      new_descriptors->Set(i, descriptor, witness);
6934    } else {
6935      new_descriptors->CopyFrom(i, descriptors, i, witness);
6936    }
6937  }
6938
6939  // Re-sort if descriptors were removed.
6940  if (new_size != descriptors->length()) new_descriptors->Sort();
6941
6942  SimpleTransitionFlag simple_flag =
6943      (insertion_index == descriptors->number_of_descriptors() - 1)
6944      ? SIMPLE_TRANSITION
6945      : FULL_TRANSITION;
6946  return CopyReplaceDescriptors(new_descriptors, flag, key, simple_flag);
6947}
6948
6949
6950void Map::UpdateCodeCache(Handle<Map> map,
6951                          Handle<Name> name,
6952                          Handle<Code> code) {
6953  Isolate* isolate = map->GetIsolate();
6954  CALL_HEAP_FUNCTION_VOID(isolate,
6955                          map->UpdateCodeCache(*name, *code));
6956}
6957
6958
// Stores |code| under |name| in this map's code cache, lazily replacing
// the placeholder (an empty fixed array) with a real CodeCache struct on
// first use.
MaybeObject* Map::UpdateCodeCache(Name* name, Code* code) {
  ASSERT(!is_shared() || code->allowed_in_shared_map_code_cache());

  // Allocate the code cache if not present.
  if (code_cache()->IsFixedArray()) {
    Object* result;
    { MaybeObject* maybe_result = GetHeap()->AllocateCodeCache();
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
    set_code_cache(result);
  }

  // Update the code cache.
  return CodeCache::cast(code_cache())->Update(name, code);
}
6974
6975
6976Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
6977  // Do a lookup if a code cache exists.
6978  if (!code_cache()->IsFixedArray()) {
6979    return CodeCache::cast(code_cache())->Lookup(name, flags);
6980  } else {
6981    return GetHeap()->undefined_value();
6982  }
6983}
6984
6985
6986int Map::IndexInCodeCache(Object* name, Code* code) {
6987  // Get the internal index if a code cache exists.
6988  if (!code_cache()->IsFixedArray()) {
6989    return CodeCache::cast(code_cache())->GetIndex(name, code);
6990  }
6991  return -1;
6992}
6993
6994
// Removes the cache entry previously located via IndexInCodeCache.
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
  // No GC is supposed to happen between a call to IndexInCodeCache and
  // RemoveFromCodeCache so the code cache must be there.
  ASSERT(!code_cache()->IsFixedArray());
  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
}
7001
7002
// An iterator over all map transitions in a transition array, reusing the map
// field of the array while it is running.
7005class IntrusiveMapTransitionIterator {
7006 public:
7007  explicit IntrusiveMapTransitionIterator(TransitionArray* transition_array)
7008      : transition_array_(transition_array) { }
7009
7010  void Start() {
7011    ASSERT(!IsIterating());
7012    *TransitionArrayHeader() = Smi::FromInt(0);
7013  }
7014
7015  bool IsIterating() {
7016    return (*TransitionArrayHeader())->IsSmi();
7017  }
7018
7019  Map* Next() {
7020    ASSERT(IsIterating());
7021    int index = Smi::cast(*TransitionArrayHeader())->value();
7022    int number_of_transitions = transition_array_->number_of_transitions();
7023    while (index < number_of_transitions) {
7024      *TransitionArrayHeader() = Smi::FromInt(index + 1);
7025      return transition_array_->GetTarget(index);
7026    }
7027
7028    *TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map();
7029    return NULL;
7030  }
7031
7032 private:
7033  Object** TransitionArrayHeader() {
7034    return HeapObject::RawField(transition_array_, TransitionArray::kMapOffset);
7035  }
7036
7037  TransitionArray* transition_array_;
7038};
7039
7040
7041// An iterator over all prototype transitions, reusing the map field of the
7042// underlying array while it is running.
// An iterator over all prototype transitions, reusing the map field of the
// underlying array while it is running.
class IntrusivePrototypeTransitionIterator {
 public:
  explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans)
      : proto_trans_(proto_trans) { }

  // Begin iterating by overwriting the array's map word with the current
  // transition number, encoded as a Smi.  Must not already be iterating.
  void Start() {
    ASSERT(!IsIterating());
    *Header() = Smi::FromInt(0);
  }

  // Iteration is in progress exactly while the map word holds the Smi.
  bool IsIterating() {
    return (*Header())->IsSmi();
  }

  // Returns the next prototype-transition map and advances, or restores the
  // stolen map word to the fixed array map and returns NULL when done.
  Map* Next() {
    ASSERT(IsIterating());
    int transitionNumber = Smi::cast(*Header())->value();
    if (transitionNumber < NumberOfTransitions()) {
      *Header() = Smi::FromInt(transitionNumber + 1);
      return GetTransition(transitionNumber);
    }
    *Header() = proto_trans_->GetHeap()->fixed_array_map();
    return NULL;
  }

 private:
  // The array's map slot, temporarily reused as iteration state.
  Object** Header() {
    return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset);
  }

  // Reads the entry count stored inside the prototype-transition array.
  int NumberOfTransitions() {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
    return Smi::cast(num)->value();
  }

  Map* GetTransition(int transitionNumber) {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
  }

  // Computes the array index of a transition's map slot from the header
  // size, per-entry layout and entry number.
  int IndexFor(int transitionNumber) {
    return Map::kProtoTransitionHeaderSize +
        Map::kProtoTransitionMapOffset +
        transitionNumber * Map::kProtoTransitionElementsPerEntry;
  }

  HeapObject* proto_trans_;
};
7092
7093
7094// To traverse the transition tree iteratively, we have to store two kinds of
7095// information in a map: The parent map in the traversal and which children of a
7096// node have already been visited. To do this without additional memory, we
7097// temporarily reuse two maps with known values:
7098//
7099//  (1) The map of the map temporarily holds the parent, and is restored to the
7100//      meta map afterwards.
7101//
7102//  (2) The info which children have already been visited depends on which part
7103//      of the map we currently iterate:
7104//
//    (a) If we currently follow normal map transitions, we temporarily store
//        the current index in the map of the FixedArray of the descriptor
//        array's contents, and restore it to the fixed array map afterwards.
//        Note that a single descriptor can have 0, 1, or 2 transitions.
7109//
7110//    (b) If we currently follow prototype transitions, we temporarily store
7111//        the current index in the map of the FixedArray holding the prototype
7112//        transitions, and restore it to the fixed array map afterwards.
7113//
// Note that the child iterator is just a concatenation of two iterators: One
// iterating over map transitions and one iterating over prototype transitions.
class TraversableMap : public Map {
 public:
  // Record the parent in the traversal within this map. Note that this destroys
  // this map's map!
  void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }

  // Reset the current map's map, returning the parent previously stored in it.
  TraversableMap* GetAndResetParent() {
    TraversableMap* old_parent = static_cast<TraversableMap*>(map());
    set_map_no_write_barrier(GetHeap()->meta_map());
    return old_parent;
  }

  // Start iterating over this map's children, possibly destroying a FixedArray
  // map (see explanation above).
  void ChildIteratorStart() {
    if (HasTransitionArray()) {
      if (HasPrototypeTransitions()) {
        IntrusivePrototypeTransitionIterator(GetPrototypeTransitions()).Start();
      }

      IntrusiveMapTransitionIterator(transitions()).Start();
    }
  }

  // If we have an unvisited child map, return that one and advance. If we have
  // none, return NULL and reset any destroyed FixedArray maps.
  TraversableMap* ChildIteratorNext() {
    // If the transition array's map word holds neither an iteration Smi nor
    // the transition array map, there is nothing to iterate.
    TransitionArray* transition_array = unchecked_transition_array();
    if (!transition_array->map()->IsSmi() &&
        !transition_array->IsTransitionArray()) {
      return NULL;
    }

    // Prototype transitions are exhausted first, then map transitions.
    if (transition_array->HasPrototypeTransitions()) {
      HeapObject* proto_transitions =
          transition_array->UncheckedPrototypeTransitions();
      IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions);
      if (proto_iterator.IsIterating()) {
        Map* next = proto_iterator.Next();
        if (next != NULL) return static_cast<TraversableMap*>(next);
      }
    }

    IntrusiveMapTransitionIterator transition_iterator(transition_array);
    if (transition_iterator.IsIterating()) {
      Map* next = transition_iterator.Next();
      if (next != NULL) return static_cast<TraversableMap*>(next);
    }

    return NULL;
  }
};
7169
7170
// Traverse the transition tree in postorder without using the C++ stack by
// doing pointer reversal.  |callback| is invoked once per map, children
// before parents, with |data| passed through unchanged.
void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
  TraversableMap* current = static_cast<TraversableMap*>(this);
  current->ChildIteratorStart();
  while (true) {
    TraversableMap* child = current->ChildIteratorNext();
    if (child != NULL) {
      // Descend: stash the parent in the child's map word (pointer
      // reversal) and continue from the child.
      child->ChildIteratorStart();
      child->SetParent(current);
      current = child;
    } else {
      // All children visited: restore the map word, report this map, and
      // climb back up.  The traversal ends when the root is reported.
      TraversableMap* parent = current->GetAndResetParent();
      callback(current, data);
      if (current == this) break;
      current = parent;
    }
  }
}
7190
7191
// Stores |code| under |name|, routing NORMAL stubs to the hash-table cache
// (allocated lazily) and everything else to the linear default cache.
MaybeObject* CodeCache::Update(Name* name, Code* code) {
  // The number of monomorphic stubs for normal load/store/call IC's can grow to
  // a large number and therefore they need to go into a hash table. They are
  // used to load global properties from cells.
  if (code->type() == Code::NORMAL) {
    // Make sure that a hash table is allocated for the normal load code cache.
    if (normal_type_cache()->IsUndefined()) {
      Object* result;
      { MaybeObject* maybe_result =
            CodeCacheHashTable::Allocate(GetHeap(),
                                         CodeCacheHashTable::kInitialSize);
        if (!maybe_result->ToObject(&result)) return maybe_result;
      }
      set_normal_type_cache(result);
    }
    return UpdateNormalTypeCache(name, code);
  } else {
    ASSERT(default_cache()->IsFixedArray());
    return UpdateDefaultCache(name, code);
  }
}
7213
7214
// Inserts (name, code) into the linear default cache, reusing an existing
// slot for the same name/flags, then a deleted (null) slot, then a free
// (undefined) slot, and finally growing the backing array by ~50%.
MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) {
  // When updating the default code cache we disregard the type encoded in the
  // flags. This allows call constant stubs to overwrite call field
  // stubs, etc.
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // First check whether we can update existing code cache without
  // extending it.
  FixedArray* cache = default_cache();
  int length = cache->length();
  int deleted_index = -1;
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i);
    if (key->IsNull()) {
      // Remember the first deleted slot in case we need it below.
      if (deleted_index < 0) deleted_index = i;
      continue;
    }
    if (key->IsUndefined()) {
      // End of the used region: store here (or in an earlier deleted slot).
      if (deleted_index >= 0) i = deleted_index;
      cache->set(i + kCodeCacheEntryNameOffset, name);
      cache->set(i + kCodeCacheEntryCodeOffset, code);
      return this;
    }
    if (name->Equals(Name::cast(key))) {
      // Same name with matching (type-stripped) flags: overwrite in place.
      Code::Flags found =
          Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
      if (Code::RemoveTypeFromFlags(found) == flags) {
        cache->set(i + kCodeCacheEntryCodeOffset, code);
        return this;
      }
    }
  }

  // Reached the end of the code cache.  If there were deleted
  // elements, reuse the space for the first of them.
  if (deleted_index >= 0) {
    cache->set(deleted_index + kCodeCacheEntryNameOffset, name);
    cache->set(deleted_index + kCodeCacheEntryCodeOffset, code);
    return this;
  }

  // Extend the code cache with some new entries (at least one). Must be a
  // multiple of the entry size.
  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
  new_length = new_length - new_length % kCodeCacheEntrySize;
  ASSERT((new_length % kCodeCacheEntrySize) == 0);
  Object* result;
  { MaybeObject* maybe_result = cache->CopySize(new_length);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // Add the (name, code) pair to the new cache.
  cache = FixedArray::cast(result);
  cache->set(length + kCodeCacheEntryNameOffset, name);
  cache->set(length + kCodeCacheEntryCodeOffset, code);
  set_default_cache(cache);
  return this;
}
7273
7274
7275MaybeObject* CodeCache::UpdateNormalTypeCache(Name* name, Code* code) {
7276  // Adding a new entry can cause a new cache to be allocated.
7277  CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7278  Object* new_cache;
7279  { MaybeObject* maybe_new_cache = cache->Put(name, code);
7280    if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
7281  }
7282  set_normal_type_cache(new_cache);
7283  return this;
7284}
7285
7286
7287Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
7288  if (Code::ExtractTypeFromFlags(flags) == Code::NORMAL) {
7289    return LookupNormalTypeCache(name, flags);
7290  } else {
7291    return LookupDefaultCache(name, flags);
7292  }
7293}
7294
7295
7296Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
7297  FixedArray* cache = default_cache();
7298  int length = cache->length();
7299  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7300    Object* key = cache->get(i + kCodeCacheEntryNameOffset);
7301    // Skip deleted elements.
7302    if (key->IsNull()) continue;
7303    if (key->IsUndefined()) return key;
7304    if (name->Equals(Name::cast(key))) {
7305      Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
7306      if (code->flags() == flags) {
7307        return code;
7308      }
7309    }
7310  }
7311  return GetHeap()->undefined_value();
7312}
7313
7314
7315Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
7316  if (!normal_type_cache()->IsUndefined()) {
7317    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7318    return cache->Lookup(name, flags);
7319  } else {
7320    return GetHeap()->undefined_value();
7321  }
7322}
7323
7324
// Returns an opaque index identifying the entry for (name, code), or -1 if
// absent.  For NORMAL code this is the hash table entry index; for the
// default cache it is the offset of the entry's code slot (entry start + 1,
// since kCodeCacheEntryCodeOffset follows the name slot), which is always
// positive.  RemoveByIndex below relies on this exact convention.
int CodeCache::GetIndex(Object* name, Code* code) {
  if (code->type() == Code::NORMAL) {
    if (normal_type_cache()->IsUndefined()) return -1;
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->GetIndex(Name::cast(name), code->flags());
  }

  FixedArray* array = default_cache();
  int len = array->length();
  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
    if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
  }
  return -1;
}
7339
7340
// Removes the entry identified by |index|, which must have been produced by
// GetIndex above for the same (name, code) pair.  For the default cache,
// |index| points at the code slot, so index - 1 is the name slot.
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
  if (code->type() == Code::NORMAL) {
    ASSERT(!normal_type_cache()->IsUndefined());
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
    cache->RemoveByIndex(index);
  } else {
    FixedArray* array = default_cache();
    ASSERT(array->length() >= index && array->get(index)->IsCode());
    // Use null instead of undefined for deleted elements to distinguish
    // deleted elements from unused elements.  This distinction is used
    // when looking up in the cache and when updating the cache.
    ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
    array->set_null(index - 1);  // Name.
    array->set_null(index);  // Code.
  }
}
7358
7359
7360// The key in the code cache hash table consists of the property name and the
7361// code object. The actual match is on the name and the code flags. If a key
7362// is created using the flags and not a code object it can only be used for
7363// lookup not to create a new entry.
// The key in the code cache hash table consists of the property name and the
// code object. The actual match is on the name and the code flags. If a key
// is created using the flags and not a code object it can only be used for
// lookup not to create a new entry.
class CodeCacheHashTableKey : public HashTableKey {
 public:
  // Lookup-only key: no code object, so AsObject must not be called.
  CodeCacheHashTableKey(Name* name, Code::Flags flags)
      : name_(name), flags_(flags), code_(NULL) { }

  // Insertion-capable key: flags are derived from the code object.
  CodeCacheHashTableKey(Name* name, Code* code)
      : name_(name),
        flags_(code->flags()),
        code_(code) { }


  // Stored entries are (name, code) FixedArray pairs; match on name and
  // on the stored code's flags.
  bool IsMatch(Object* other) {
    if (!other->IsFixedArray()) return false;
    FixedArray* pair = FixedArray::cast(other);
    Name* name = Name::cast(pair->get(0));
    Code::Flags flags = Code::cast(pair->get(1))->flags();
    if (flags != flags_) {
      return false;
    }
    return name_->Equals(name);
  }

  // Hash combines the name hash with the code flags so that the same name
  // with different flags lands in different entries.
  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
    return name->Hash() ^ flags;
  }

  uint32_t Hash() { return NameFlagsHashHelper(name_, flags_); }

  // Hash of an already-stored (name, code) pair; must agree with Hash().
  uint32_t HashForObject(Object* obj) {
    FixedArray* pair = FixedArray::cast(obj);
    Name* name = Name::cast(pair->get(0));
    Code* code = Code::cast(pair->get(1));
    return NameFlagsHashHelper(name, code->flags());
  }

  // Materializes the key as a 2-element FixedArray for storage in the
  // table.  May fail with a retry-after-GC result.
  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
    ASSERT(code_ != NULL);
    Object* obj;
    { MaybeObject* maybe_obj = heap->AllocateFixedArray(2);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* pair = FixedArray::cast(obj);
    pair->set(0, name_);
    pair->set(1, code_);
    return pair;
  }

 private:
  Name* name_;
  Code::Flags flags_;
  // TODO(jkummerow): We should be able to get by without this.
  Code* code_;
};
7417
7418
7419Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
7420  CodeCacheHashTableKey key(name, flags);
7421  int entry = FindEntry(&key);
7422  if (entry == kNotFound) return GetHeap()->undefined_value();
7423  return get(EntryToIndex(entry) + 1);
7424}
7425
7426
// Inserts (name, code) into the table, growing it if necessary.  Returns
// the table to use afterwards (possibly a new, larger one), or a
// retry-after-GC failure from either allocation.
MaybeObject* CodeCacheHashTable::Put(Name* name, Code* code) {
  CodeCacheHashTableKey key(name, code);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Don't use |this|, as the table might have grown.
  CodeCacheHashTable* cache = reinterpret_cast<CodeCacheHashTable*>(obj);

  int entry = cache->FindInsertionEntry(key.Hash());
  Object* k;
  // Materialize the key as a (name, code) pair; may fail and abort the Put.
  { MaybeObject* maybe_k = key.AsObject(GetHeap());
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }

  cache->set(EntryToIndex(entry), k);
  cache->set(EntryToIndex(entry) + 1, code);
  cache->ElementAdded();
  return cache;
}
7448
7449
7450int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
7451  CodeCacheHashTableKey key(name, flags);
7452  int entry = FindEntry(&key);
7453  return (entry == kNotFound) ? -1 : entry;
7454}
7455
7456
7457void CodeCacheHashTable::RemoveByIndex(int index) {
7458  ASSERT(index >= 0);
7459  Heap* heap = GetHeap();
7460  set(EntryToIndex(index), heap->the_hole_value());
7461  set(EntryToIndex(index) + 1, heap->the_hole_value());
7462  ElementRemoved();
7463}
7464
7465
// Handle-based convenience wrapper around the raw Update below.  The
// CALL_HEAP_FUNCTION_VOID macro retries the operation after GC on
// allocation failure.
void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> cache,
                                  MapHandleList* maps,
                                  Code::Flags flags,
                                  Handle<Code> code) {
  Isolate* isolate = cache->GetIsolate();
  CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code));
}
7473
7474
// Adds ((maps, flags) -> code) to the cache, lazily allocating the backing
// hash table on first use.  Returns this PolymorphicCodeCache on success,
// or a retry-after-GC failure.
MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps,
                                          Code::Flags flags,
                                          Code* code) {
  // Initialize cache if necessary.
  if (cache()->IsUndefined()) {
    Object* result;
    { MaybeObject* maybe_result =
          PolymorphicCodeCacheHashTable::Allocate(
              GetHeap(),
              PolymorphicCodeCacheHashTable::kInitialSize);
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
    set_cache(result);
  } else {
    // This entry shouldn't be contained in the cache yet.
    ASSERT(PolymorphicCodeCacheHashTable::cast(cache())
               ->Lookup(maps, flags)->IsUndefined());
  }
  PolymorphicCodeCacheHashTable* hash_table =
      PolymorphicCodeCacheHashTable::cast(cache());
  Object* new_cache;
  { MaybeObject* maybe_new_cache = hash_table->Put(maps, flags, code);
    if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
  }
  // Put may have returned a grown table; always store its result.
  set_cache(new_cache);
  return this;
}
7502
7503
7504Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
7505                                            Code::Flags flags) {
7506  if (!cache()->IsUndefined()) {
7507    PolymorphicCodeCacheHashTable* hash_table =
7508        PolymorphicCodeCacheHashTable::cast(cache());
7509    return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
7510  } else {
7511    return GetIsolate()->factory()->undefined_value();
7512  }
7513}
7514
7515
7516// Despite their name, object of this class are not stored in the actual
7517// hash table; instead they're temporarily used for lookups. It is therefore
7518// safe to have a weak (non-owning) pointer to a MapList as a member field.
// Despite their name, object of this class are not stored in the actual
// hash table; instead they're temporarily used for lookups. It is therefore
// safe to have a weak (non-owning) pointer to a MapList as a member field.
class PolymorphicCodeCacheHashTableKey : public HashTableKey {
 public:
  // Callers must ensure that |maps| outlives the newly constructed object.
  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
      : maps_(maps),
        code_flags_(code_flags) {}

  // Matches on code flags plus set-equality of the map lists (order does
  // not matter).
  bool IsMatch(Object* other) {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(other, &other_flags, &other_maps);
    if (code_flags_ != other_flags) return false;
    if (maps_->length() != other_maps.length()) return false;
    // Compare just the hashes first because it's faster.
    int this_hash = MapsHashHelper(maps_, code_flags_);
    int other_hash = MapsHashHelper(&other_maps, other_flags);
    if (this_hash != other_hash) return false;

    // Full comparison: for each map in maps_, look for an equivalent map in
    // other_maps. This implementation is slow, but probably good enough for
    // now because the lists are short (<= 4 elements currently).
    for (int i = 0; i < maps_->length(); ++i) {
      bool match_found = false;
      for (int j = 0; j < other_maps.length(); ++j) {
        if (*(maps_->at(i)) == *(other_maps.at(j))) {
          match_found = true;
          break;
        }
      }
      if (!match_found) return false;
    }
    return true;
  }

  // XOR of the map hashes keeps the hash order-independent, matching the
  // order-independent IsMatch above.
  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
    uint32_t hash = code_flags;
    for (int i = 0; i < maps->length(); ++i) {
      hash ^= maps->at(i)->Hash();
    }
    return hash;
  }

  uint32_t Hash() {
    return MapsHashHelper(maps_, code_flags_);
  }

  // Hash of an already-stored key object; must agree with Hash().
  uint32_t HashForObject(Object* obj) {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(obj, &other_flags, &other_maps);
    return MapsHashHelper(&other_maps, other_flags);
  }

  // Serializes the key as a FixedArray: [flags, map0, map1, ...].
  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
    Object* obj;
    // The maps in |maps_| must be copied to a newly allocated FixedArray,
    // both because the referenced MapList is short-lived, and because C++
    // objects can't be stored in the heap anyway.
    { MaybeObject* maybe_obj =
          heap->AllocateUninitializedFixedArray(maps_->length() + 1);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* list = FixedArray::cast(obj);
    list->set(0, Smi::FromInt(code_flags_));
    for (int i = 0; i < maps_->length(); ++i) {
      list->set(i + 1, *maps_->at(i));
    }
    return list;
  }

 private:
  // Deserializes a stored key FixedArray back into (code_flags, maps).
  // |maps| is cleared first and then filled; it is also returned.
  static MapHandleList* FromObject(Object* obj,
                                   int* code_flags,
                                   MapHandleList* maps) {
    FixedArray* list = FixedArray::cast(obj);
    maps->Rewind(0);
    *code_flags = Smi::cast(list->get(0))->value();
    for (int i = 1; i < list->length(); ++i) {
      maps->Add(Handle<Map>(Map::cast(list->get(i))));
    }
    return maps;
  }

  MapHandleList* maps_;  // weak.
  int code_flags_;
  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
};
7606
7607
7608Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
7609                                              int code_flags) {
7610  PolymorphicCodeCacheHashTableKey key(maps, code_flags);
7611  int entry = FindEntry(&key);
7612  if (entry == kNotFound) return GetHeap()->undefined_value();
7613  return get(EntryToIndex(entry) + 1);
7614}
7615
7616
// Inserts ((maps, code_flags) -> code), growing the table if necessary.
// Returns the table to use afterwards (possibly a new one), or a
// retry-after-GC failure from either allocation.
MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps,
                                                int code_flags,
                                                Code* code) {
  PolymorphicCodeCacheHashTableKey key(maps, code_flags);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  // Use the (possibly grown) table returned by EnsureCapacity, not |this|.
  PolymorphicCodeCacheHashTable* cache =
      reinterpret_cast<PolymorphicCodeCacheHashTable*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());
  // |obj| is reused here to receive the materialized key array.
  { MaybeObject* maybe_obj = key.AsObject(GetHeap());
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  cache->set(EntryToIndex(entry), obj);
  cache->set(EntryToIndex(entry) + 1, code);
  cache->ElementAdded();
  return cache;
}
7636
7637
// Returns a FixedArray containing the union of this array's elements and
// the keys of |array|, delegating the merge to the array's elements
// accessor.  May fail with a retry-after-GC result.
MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
  ElementsAccessor* accessor = array->GetElementsAccessor();
  MaybeObject* maybe_result =
      accessor->AddElementsToFixedArray(array, array, this);
  FixedArray* result;
  if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef DEBUG
  if (FLAG_enable_slow_asserts) {
    // Property keys must be numbers (indices) or names.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      ASSERT(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}
7654
7655
// Returns a FixedArray containing the union of this array's elements and
// |other|'s elements.  May fail with a retry-after-GC result.
MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
  ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
  // No JSObject receiver/holder is involved here, hence the NULLs.
  MaybeObject* maybe_result =
      accessor->AddElementsToFixedArray(NULL, NULL, this, other);
  FixedArray* result;
  if (!maybe_result->To(&result)) return maybe_result;
#ifdef DEBUG
  if (FLAG_enable_slow_asserts) {
    // Property keys must be numbers (indices) or names.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      ASSERT(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}
7672
7673
// Allocates a new FixedArray of |new_length| and copies min(new_length,
// length()) elements from this array into it.  Extra slots stay as
// allocated by AllocateFixedArray.  May fail with a retry-after-GC result.
MaybeObject* FixedArray::CopySize(int new_length) {
  Heap* heap = GetHeap();
  if (new_length == 0) return heap->empty_fixed_array();
  Object* obj;
  { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  FixedArray* result = FixedArray::cast(obj);
  // Copy the content
  DisallowHeapAllocation no_gc;
  int len = length();
  if (new_length < len) len = new_length;
  // We are taking the map from the old fixed array so the map is sure to
  // be an immortal immutable object.
  result->set_map_no_write_barrier(map());
  // Compute the barrier mode once; valid while allocation is disallowed.
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, get(i), mode);
  }
  return result;
}
7695
7696
7697void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
7698  DisallowHeapAllocation no_gc;
7699  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
7700  for (int index = 0; index < len; index++) {
7701    dest->set(dest_pos+index, get(pos+index), mode);
7702  }
7703}
7704
7705
7706#ifdef DEBUG
7707bool FixedArray::IsEqualTo(FixedArray* other) {
7708  if (length() != other->length()) return false;
7709  for (int i = 0 ; i < length(); ++i) {
7710    if (get(i) != other->get(i)) return false;
7711  }
7712  return true;
7713}
7714#endif
7715
7716
// Allocates a descriptor array with room for |number_of_descriptors| used
// descriptors plus |slack| spare slots.  The length and enum-cache header
// fields are initialized; descriptor slots are not.
MaybeObject* DescriptorArray::Allocate(int number_of_descriptors, int slack) {
  Heap* heap = Isolate::Current()->heap();
  // Do not use DescriptorArray::cast on incomplete object.
  int size = number_of_descriptors + slack;
  if (size == 0) return heap->empty_descriptor_array();
  FixedArray* result;
  // Allocate the array of keys.
  MaybeObject* maybe_array = heap->AllocateFixedArray(LengthFor(size));
  if (!maybe_array->To(&result)) return maybe_array;

  // Smi 0 in the enum-cache slot means "no enum cache".
  result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
  result->set(kEnumCacheIndex, Smi::FromInt(0));
  return result;
}
7731
7732
// Drops the enum cache by resetting the bridge slot back to the Smi 0
// sentinel that means "no enum cache".
void DescriptorArray::ClearEnumCache() {
  set(kEnumCacheIndex, Smi::FromInt(0));
}
7736
7737
// Installs an enum cache: |bridge_storage| is a small fixed array that holds
// the cache of enumerable keys (|new_cache|) and an optional index cache
// (|new_index_cache|, a Smi when absent), and is itself stored in the
// enum-cache slot of this descriptor array.
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
                                   FixedArray* new_cache,
                                   Object* new_index_cache) {
  ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
  ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
  ASSERT(!IsEmpty());
  // A new cache may only replace a strictly smaller one.
  ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
  FixedArray::cast(bridge_storage)->
    set(kEnumCacheBridgeCacheIndex, new_cache);
  FixedArray::cast(bridge_storage)->
    set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
  set(kEnumCacheIndex, bridge_storage);
}
7751
7752
7753void DescriptorArray::CopyFrom(int dst_index,
7754                               DescriptorArray* src,
7755                               int src_index,
7756                               const WhitenessWitness& witness) {
7757  Object* value = src->GetValue(src_index);
7758  PropertyDetails details = src->GetDetails(src_index);
7759  Descriptor desc(src->GetKey(src_index), value, details);
7760  Set(dst_index, &desc, witness);
7761}
7762
7763
7764// Generalize the |other| descriptor array by merging it into the (at least
7765// partly) updated |this| descriptor array.
7766// The method merges two descriptor array in three parts. Both descriptor arrays
7767// are identical up to |verbatim|. They also overlap in keys up to |valid|.
7768// Between |verbatim| and |valid|, the resulting descriptor type as well as the
7769// representation are generalized from both |this| and |other|. Beyond |valid|,
7770// the descriptors are copied verbatim from |other| up to |new_size|.
7771// In case of incompatible types, the type and representation of |other| is
7772// used.
MaybeObject* DescriptorArray::Merge(int verbatim,
                                    int valid,
                                    int new_size,
                                    DescriptorArray* other) {
  ASSERT(verbatim <= valid);
  ASSERT(valid <= new_size);

  DescriptorArray* result;
  // Allocate a new descriptor array large enough to hold the required
  // descriptors, with minimally the exact same size as this descriptor array.
  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
      new_size, Max(new_size, other->number_of_descriptors()) - new_size);
  if (!maybe_descriptors->To(&result)) return maybe_descriptors;
  ASSERT(result->length() > length() ||
         result->NumberOfSlackDescriptors() > 0 ||
         result->number_of_descriptors() == other->number_of_descriptors());
  ASSERT(result->number_of_descriptors() == new_size);

  DescriptorArray::WhitenessWitness witness(result);

  int descriptor;

  // 0 -> |verbatim|
  // Copy the shared prefix unchanged from |this|, counting field slots so
  // that |current_offset| tracks the next free property field index.
  int current_offset = 0;
  for (descriptor = 0; descriptor < verbatim; descriptor++) {
    if (GetDetails(descriptor).type() == FIELD) current_offset++;
    result->CopyFrom(descriptor, this, descriptor, witness);
  }

  // |verbatim| -> |valid|
  // Keys overlap here; generalize where the two arrays disagree.
  for (; descriptor < valid; descriptor++) {
    Name* key = GetKey(descriptor);
    PropertyDetails details = GetDetails(descriptor);
    PropertyDetails other_details = other->GetDetails(descriptor);

    // Either side being a FIELD, or two CONSTANTs with different values,
    // forces the merged descriptor to become a FIELD with a generalized
    // representation.
    if (details.type() == FIELD || other_details.type() == FIELD ||
        (details.type() == CONSTANT &&
         other_details.type() == CONSTANT &&
         GetValue(descriptor) != other->GetValue(descriptor))) {
      Representation representation =
          details.representation().generalize(other_details.representation());
      FieldDescriptor d(key,
                        current_offset++,
                        other_details.attributes(),
                        representation);
      result->Set(descriptor, &d, witness);
    } else {
      // Otherwise |other|'s descriptor wins as-is.
      result->CopyFrom(descriptor, other, descriptor, witness);
    }
  }

  // |valid| -> |new_size|
  // Beyond the overlap, take descriptors from |other|, renumbering any
  // field descriptors to the next free field slot.
  for (; descriptor < new_size; descriptor++) {
    PropertyDetails details = other->GetDetails(descriptor);
    if (details.type() == FIELD) {
      Name* key = other->GetKey(descriptor);
      FieldDescriptor d(key,
                        current_offset++,
                        details.attributes(),
                        details.representation());
      result->Set(descriptor, &d, witness);
    } else {
      result->CopyFrom(descriptor, other, descriptor, witness);
    }
  }

  // Restore the sorted-by-hash invariant on the new array.
  result->Sort();
  return result;
}
7842
7843
7844// Checks whether a merge of |other| into |this| would return a copy of |this|.
7845bool DescriptorArray::IsMoreGeneralThan(int verbatim,
7846                                        int valid,
7847                                        int new_size,
7848                                        DescriptorArray* other) {
7849  ASSERT(verbatim <= valid);
7850  ASSERT(valid <= new_size);
7851  if (valid != new_size) return false;
7852
7853  for (int descriptor = verbatim; descriptor < valid; descriptor++) {
7854    PropertyDetails details = GetDetails(descriptor);
7855    PropertyDetails other_details = other->GetDetails(descriptor);
7856    if (!other_details.representation().fits_into(details.representation())) {
7857      return false;
7858    }
7859    if (details.type() == CONSTANT) {
7860      if (other_details.type() != CONSTANT) return false;
7861      if (GetValue(descriptor) != other->GetValue(descriptor)) return false;
7862    }
7863  }
7864
7865  return true;
7866}
7867
7868
7869// We need the whiteness witness since sort will reshuffle the entries in the
7870// descriptor array. If the descriptor array were to be black, the shuffling
7871// would move a slot that was already recorded as pointing into an evacuation
7872// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::Sort() {
  // In-place heap sort.
  // Only the "sorted key" index mapping is permuted; the descriptor
  // entries themselves stay in place.  Ordering is by key hash.
  int len = number_of_descriptors();
  // Reset sorting since the descriptor array might contain invalid pointers.
  for (int i = 0; i < len; ++i) SetSortedKey(i, i);
  // Bottom-up max-heap construction.
  // Index of the last node with children
  const int max_parent_index = (len / 2) - 1;
  for (int i = max_parent_index; i >= 0; --i) {
    // Sift element i down until the max-heap property holds below it.
    int parent_index = i;
    const uint32_t parent_hash = GetSortedKey(i)->Hash();
    while (parent_index <= max_parent_index) {
      int child_index = 2 * parent_index + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      // Pick the larger of the two children, if a right child exists.
      if (child_index + 1 < len) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_index++;
          child_hash = right_child_hash;
        }
      }
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      // Now element at child_index could be < its children.
      parent_index = child_index;  // parent_hash remains correct.
    }
  }

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    SwapSortedKeys(0, i);
    // Shift down the new top element.
    int parent_index = 0;
    const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
    // The heap now only covers indices [0, i); recompute the last parent.
    const int max_parent_index = (i / 2) - 1;
    while (parent_index <= max_parent_index) {
      int child_index = parent_index * 2 + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < i) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_index++;
          child_hash = right_child_hash;
        }
      }
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      parent_index = child_index;
    }
  }
  ASSERT(IsSortedNoDuplicates());
}
7926
7927
7928Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
7929  Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
7930  copy->set_getter(pair->getter());
7931  copy->set_setter(pair->setter());
7932  return copy;
7933}
7934
7935
7936Object* AccessorPair::GetComponent(AccessorComponent component) {
7937  Object* accessor = get(component);
7938  return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
7939}
7940
7941
// Allocates a fixed array sized (via LengthFor) to hold |deopt_entry_count|
// deoptimization entries.  May fail with a retry-after-GC result.
MaybeObject* DeoptimizationInputData::Allocate(int deopt_entry_count,
                                               PretenureFlag pretenure) {
  ASSERT(deopt_entry_count > 0);
  return HEAP->AllocateFixedArray(LengthFor(deopt_entry_count),
                                  pretenure);
}
7948
7949
// Allocates a fixed array sized to hold |number_of_deopt_points| entries;
// zero points share the canonical empty array.  May fail with a
// retry-after-GC result.
MaybeObject* DeoptimizationOutputData::Allocate(int number_of_deopt_points,
                                                PretenureFlag pretenure) {
  if (number_of_deopt_points == 0) return HEAP->empty_fixed_array();
  return HEAP->AllocateFixedArray(LengthOfFixedArray(number_of_deopt_points),
                                  pretenure);
}
7956
7957
7958#ifdef DEBUG
7959bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
7960  if (IsEmpty()) return other->IsEmpty();
7961  if (other->IsEmpty()) return false;
7962  if (length() != other->length()) return false;
7963  for (int i = 0; i < length(); ++i) {
7964    if (get(i) != other->get(i)) return false;
7965  }
7966  return true;
7967}
7968#endif
7969
7970
7971bool String::LooksValid() {
7972  if (!Isolate::Current()->heap()->Contains(this)) return false;
7973  return true;
7974}
7975
7976
// Returns a view of this string's characters if it is flat (its characters
// are in one contiguous chunk), or an empty FlatContent otherwise.  Cons
// strings whose second half is empty and sliced strings are unwrapped to
// reach the underlying sequential or external string.  The returned
// pointers are raw, hence the no-allocation assertion.
String::FlatContent String::GetFlatContent() {
  ASSERT(!AllowHeapAllocation::IsAllowed());
  int length = this->length();
  StringShape shape(this);
  String* string = this;
  int offset = 0;
  if (shape.representation_tag() == kConsStringTag) {
    ConsString* cons = ConsString::cast(string);
    // A cons string is only flat if its right side is empty.
    if (cons->second()->length() != 0) {
      return FlatContent();
    }
    string = cons->first();
    shape = StringShape(string);
  }
  if (shape.representation_tag() == kSlicedStringTag) {
    SlicedString* slice = SlicedString::cast(string);
    offset = slice->offset();
    string = slice->parent();
    shape = StringShape(string);
    // Slice parents are always sequential or external.
    ASSERT(shape.representation_tag() != kConsStringTag &&
           shape.representation_tag() != kSlicedStringTag);
  }
  if (shape.encoding_tag() == kOneByteStringTag) {
    const uint8_t* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqOneByteString::cast(string)->GetChars();
    } else {
      start = ExternalAsciiString::cast(string)->GetChars();
    }
    return FlatContent(Vector<const uint8_t>(start + offset, length));
  } else {
    ASSERT(shape.encoding_tag() == kTwoByteStringTag);
    const uc16* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqTwoByteString::cast(string)->GetChars();
    } else {
      start = ExternalTwoByteString::cast(string)->GetChars();
    }
    return FlatContent(Vector<const uc16>(start + offset, length));
  }
}
8018
8019
// Converts [offset, offset + length) of this UTF-16 string into a
// NUL-terminated, heap-allocated UTF-8 buffer.  Two passes over the string:
// the first computes the UTF-8 byte count, the second encodes.  A negative
// |length| means "to the end".  With DISALLOW_NULLS, embedded NULs become
// spaces.  Returns NULL if the string fails the robustness check.  The
// UTF-8 byte count (excluding the terminator) is stored in |length_return|
// if non-NULL.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int offset,
                                          int length,
                                          int* length_return) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<char>(NULL);
  }
  Heap* heap = GetHeap();

  // Negative length means the to the end of the string.
  if (length < 0) length = kMaxInt - offset;

  // Compute the size of the UTF-8 string. Start at the specified offset.
  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value(), offset);
  int character_position = offset;
  int utf8_bytes = 0;
  // Track the previous code unit so surrogate pairs encode as one
  // character rather than two.
  int last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    utf8_bytes += unibrow::Utf8::Length(character, last);
    last = character;
  }

  if (length_return) {
    *length_return = utf8_bytes;
  }

  char* result = NewArray<char>(utf8_bytes + 1);

  // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
  stream.Reset(this, offset);
  character_position = offset;
  int utf8_byte_position = 0;
  last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    if (allow_nulls == DISALLOW_NULLS && character == 0) {
      character = ' ';
    }
    utf8_byte_position +=
        unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
    last = character;
  }
  // NUL-terminate the buffer.
  result[utf8_byte_position] = 0;
  return SmartArrayPointer<char>(result);
}
8069
8070
// Convenience overload: converts the whole string (offset 0, full length).
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int* length_return) {
  return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
}
8076
8077
// Returns the two-byte character data starting at the beginning of the
// string.
const uc16* String::GetTwoByteData() {
  return GetTwoByteData(0);
}
8081
8082
// Returns a raw pointer to the two-byte characters starting at |start|.
// Only valid for two-byte representations; slices recurse into their
// parent with the offset added, and cons strings are not supported.
const uc16* String::GetTwoByteData(unsigned start) {
  ASSERT(!IsOneByteRepresentationUnderneath());
  switch (StringShape(this).representation_tag()) {
    case kSeqStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
    case kExternalStringTag:
      return ExternalTwoByteString::cast(this)->
        ExternalTwoByteStringGetData(start);
    case kSlicedStringTag: {
      SlicedString* slice = SlicedString::cast(this);
      return slice->parent()->GetTwoByteData(start + slice->offset());
    }
    case kConsStringTag:
      // Cons strings must be flattened before their data can be exposed.
      UNREACHABLE();
      return NULL;
  }
  UNREACHABLE();
  return NULL;
}
8102
8103
// Copies the string's UTF-16 code units into a freshly allocated,
// NUL-terminated uc16 buffer.  Returns an empty pointer if the string
// fails the robustness check.
SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<uc16>();
  }
  Heap* heap = GetHeap();

  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value());

  // One extra slot for the terminating 0.
  uc16* result = NewArray<uc16>(length() + 1);

  int i = 0;
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    result[i++] = character;
  }
  result[i] = 0;
  return SmartArrayPointer<uc16>(result);
}
8124
8125
// Returns a raw pointer to the character at |start|, computed from the
// tagged object address: strip the heap-object tag, skip the header, then
// index into the character data.
const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
  return reinterpret_cast<uc16*>(
      reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
}
8130
8131
8132void Relocatable::PostGarbageCollectionProcessing() {
8133  Isolate* isolate = Isolate::Current();
8134  Relocatable* current = isolate->relocatable_top();
8135  while (current != NULL) {
8136    current->PostGarbageCollection();
8137    current = current->prev_;
8138  }
8139}
8140
8141
8142// Reserve space for statics needing saving and restoring.
// The per-thread state is a single Relocatable* (the list head); sizeof is
// unevaluated, so this just yields the size of that pointer.
int Relocatable::ArchiveSpacePerThread() {
  return sizeof(Isolate::Current()->relocatable_top());
}
8146
8147
8148// Archive statics that are thread local.
// Saves the isolate's Relocatable list head into |to|, clears it, and
// returns the address just past the consumed storage.
char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
  *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
  isolate->set_relocatable_top(NULL);
  return to + ArchiveSpacePerThread();
}
8154
8155
8156// Restore statics that are thread local.
// Restores the Relocatable list head previously saved by ArchiveState and
// returns the address just past the consumed storage.
char* Relocatable::RestoreState(Isolate* isolate, char* from) {
  isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
  return from + ArchiveSpacePerThread();
}
8161
8162
// Visits the Relocatable list archived in |thread_storage| (a buffer filled
// by ArchiveState) and returns the cursor advanced past the archived data.
char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
  Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
  Iterate(v, top);
  return thread_storage + ArchiveSpacePerThread();
}
8168
8169
// Visits the current isolate's live Relocatable list with |v|.
void Relocatable::Iterate(ObjectVisitor* v) {
  Isolate* isolate = Isolate::Current();
  Iterate(v, isolate->relocatable_top());
}
8174
8175
8176void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8177  Relocatable* current = top;
8178  while (current != NULL) {
8179    current->IterateInstance(v);
8180    current = current->prev_;
8181  }
8182}
8183
8184
// Reader backed by a heap string; registers with the Relocatable chain so
// the cached character pointer survives GC.
FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
    : Relocatable(isolate),
      str_(str.location()),
      length_(str->length()) {
  // Computes is_ascii_ and start_ from the string's current flat content.
  PostGarbageCollection();
}
8191
8192
// Reader backed by a raw character vector. str_ == 0 marks that there is no
// heap string, making PostGarbageCollection() a no-op for this reader.
FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
    : Relocatable(isolate),
      str_(0),
      is_ascii_(true),
      length_(input.length()),
      start_(input.start()) { }
8199
8200
// Refreshes the cached character pointer after a GC may have moved the
// underlying string. No-op for vector-backed readers (str_ == 0).
void FlatStringReader::PostGarbageCollection() {
  if (str_ == NULL) return;
  Handle<String> str(str_);
  ASSERT(str->IsFlat());
  DisallowHeapAllocation no_gc;
  // This does not actually prevent the vector from being relocated later.
  String::FlatContent content = str->GetFlatContent();
  ASSERT(content.IsFlat());
  // Cache the representation and a raw pointer to the first character.
  is_ascii_ = content.IsAscii();
  if (is_ascii_) {
    start_ = content.ToOneByteVector().start();
  } else {
    start_ = content.ToUC16Vector().start();
  }
}
8216
8217
// Starts iteration over a cons string at absolute character offset
// *offset_out. Returns the leaf containing that offset and fills the out
// parameters with the offset within the leaf, its instance type and length.
String* ConsStringIteratorOp::Operate(String* string,
                                      unsigned* offset_out,
                                      int32_t* type_out,
                                      unsigned* length_out) {
  ASSERT(string->IsConsString());
  ConsString* cons_string = ConsString::cast(string);
  // Set up search data.
  root_ = cons_string;
  consumed_ = *offset_out;
  // Now search.
  return Search(offset_out, type_out, length_out);
}
8230
8231
// Descends the cons tree from root_ to the leaf containing absolute offset
// consumed_, rebuilding the traversal stack in frames_ along the way.
// Returns NULL (after Reset) if the offset lies past the end of the string.
String* ConsStringIteratorOp::Search(unsigned* offset_out,
                                     int32_t* type_out,
                                     unsigned* length_out) {
  ConsString* cons_string = root_;
  // Reset the stack, pushing the root string.
  depth_ = 1;
  maximum_depth_ = 1;
  frames_[0] = cons_string;
  const unsigned consumed = consumed_;
  unsigned offset = 0;  // Absolute offset of the current subtree's start.
  while (true) {
    // Loop until the string is found which contains the target offset.
    String* string = cons_string->first();
    unsigned length = string->length();
    int32_t type;
    if (consumed < offset + length) {
      // Target offset is in the left branch.
      // Keep going if we're still in a ConsString.
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushLeft(cons_string);
        continue;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
    } else {
      // Descend right.
      // Update progress through the string.
      offset += length;
      // Keep going if we're still in a ConsString.
      string = cons_string->second();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushRight(cons_string);
        // TODO(dcarney) Add back root optimization.
        continue;
      }
      // Need this to be updated for the current string.
      length = string->length();
      // Account for the possibility of an empty right leaf.
      // This happens only if we have asked for an offset outside the string.
      if (length == 0) {
        // Reset depth so future operations will return null immediately.
        Reset();
        return NULL;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
      // Pop stack so next iteration is in correct place.
      Pop();
    }
    ASSERT(length != 0);
    // Adjust return values and exit.
    consumed_ = offset + length;
    *offset_out = consumed - offset;
    *type_out = type;
    *length_out = length;
    return string;
  }
  UNREACHABLE();
  return NULL;
}
8296
8297
// Advances to the next non-empty leaf in left-to-right order. Returns NULL
// either when traversal is complete (*blew_stack == false) or when the tree
// was deeper than the fixed-size frame stack can track (*blew_stack == true),
// in which case the caller must restart via Search.
String* ConsStringIteratorOp::NextLeaf(bool* blew_stack,
                                       int32_t* type_out,
                                       unsigned* length_out) {
  while (true) {
    // Tree traversal complete.
    if (depth_ == 0) {
      *blew_stack = false;
      return NULL;
    }
    // We've lost track of higher nodes.
    if (maximum_depth_ - depth_ == kStackSize) {
      *blew_stack = true;
      return NULL;
    }
    // Go right.
    ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
    String* string = cons_string->second();
    int32_t type = string->map()->instance_type();
    if ((type & kStringRepresentationMask) != kConsStringTag) {
      // Pop stack so next iteration is in correct place.
      Pop();
      unsigned length = static_cast<unsigned>(string->length());
      // Could be a flattened ConsString.
      if (length == 0) continue;
      *length_out = length;
      *type_out = type;
      consumed_ += length;
      return string;
    }
    cons_string = ConsString::cast(string);
    // TODO(dcarney) Add back root optimization.
    PushRight(cons_string);
    // Need to traverse all the way left.
    while (true) {
      // Continue left.
      string = cons_string->first();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) != kConsStringTag) {
        AdjustMaximumDepth();
        unsigned length = static_cast<unsigned>(string->length());
        ASSERT(length != 0);
        *length_out = length;
        *type_out = type;
        consumed_ += length;
        return string;
      }
      cons_string = ConsString::cast(string);
      PushLeft(cons_string);
    }
  }
  UNREACHABLE();
  return NULL;
}
8351
8352
8353uint16_t ConsString::ConsStringGet(int index) {
8354  ASSERT(index >= 0 && index < this->length());
8355
8356  // Check for a flattened cons string
8357  if (second()->length() == 0) {
8358    String* left = first();
8359    return left->Get(index);
8360  }
8361
8362  String* string = String::cast(this);
8363
8364  while (true) {
8365    if (StringShape(string).IsCons()) {
8366      ConsString* cons_string = ConsString::cast(string);
8367      String* left = cons_string->first();
8368      if (left->length() > index) {
8369        string = left;
8370      } else {
8371        index -= left->length();
8372        string = cons_string->second();
8373      }
8374    } else {
8375      return string->Get(index);
8376    }
8377  }
8378
8379  UNREACHABLE();
8380  return 0;
8381}
8382
8383
// Character access for sliced strings: delegate to the parent string,
// shifted by the slice's start offset.
uint16_t SlicedString::SlicedStringGet(int index) {
  return parent()->Get(offset() + index);
}
8387
8388
// Copies the character range [f, t) of |src| into |sink|, resolving cons
// and sliced strings on the way. For cons strings it iterates on the longer
// branch and recurses only on the shorter one, bounding recursion depth.
template <typename sinkchar>
void String::WriteToFlat(String* src,
                         sinkchar* sink,
                         int f,
                         int t) {
  String* source = src;
  int from = f;
  int to = t;
  while (true) {
    ASSERT(0 <= from && from <= to && to <= source->length());
    switch (StringShape(source).full_representation_tag()) {
      case kOneByteStringTag | kExternalStringTag: {
        CopyChars(sink,
                  ExternalAsciiString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kExternalStringTag: {
        const uc16* data =
            ExternalTwoByteString::cast(source)->GetChars();
        CopyChars(sink,
                  data + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqOneByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqTwoByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kConsStringTag:
      case kTwoByteStringTag | kConsStringTag: {
        ConsString* cons_string = ConsString::cast(source);
        String* first = cons_string->first();
        int boundary = first->length();
        if (to - boundary >= boundary - from) {
          // Right hand side is longer.  Recurse over left.
          if (from < boundary) {
            WriteToFlat(first, sink, from, boundary);
            sink += boundary - from;
            from = 0;
          } else {
            from -= boundary;
          }
          to -= boundary;
          source = cons_string->second();
        } else {
          // Left hand side is longer.  Recurse over right.
          if (to > boundary) {
            String* second = cons_string->second();
            // When repeatedly appending to a string, we get a cons string that
            // is unbalanced to the left, a list, essentially.  We inline the
            // common case of sequential ascii right child.
            if (to - boundary == 1) {
              sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
            } else if (second->IsSeqOneByteString()) {
              CopyChars(sink + boundary - from,
                        SeqOneByteString::cast(second)->GetChars(),
                        to - boundary);
            } else {
              WriteToFlat(second,
                          sink + boundary - from,
                          0,
                          to - boundary);
            }
            to = boundary;
          }
          source = first;
        }
        break;
      }
      case kOneByteStringTag | kSlicedStringTag:
      case kTwoByteStringTag | kSlicedStringTag: {
        SlicedString* slice = SlicedString::cast(source);
        unsigned offset = slice->offset();
        // Re-run on the parent with the range shifted by the slice offset.
        WriteToFlat(slice->parent(), sink, from + offset, to + offset);
        return;
      }
    }
  }
}
8477
8478
// Compares the contents of two same-width strings by reading and comparing
// 32-bit blocks of characters where alignment allows, falling back to a
// character-by-character tail loop.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
                                            const Char* const b,
                                            int length) {
  int i = 0;
#ifndef V8_HOST_CAN_READ_UNALIGNED
  // If this architecture isn't comfortable reading unaligned ints
  // then we have to check that the strings are aligned before
  // comparing them blockwise.
  const int kAlignmentMask = sizeof(uint32_t) - 1;  // NOLINT
  // Use uintptr_t: casting a pointer to uint32_t is ill-formed (and would
  // truncate) on 64-bit targets; only the low alignment bits matter.
  uintptr_t pa_addr = reinterpret_cast<uintptr_t>(a);
  uintptr_t pb_addr = reinterpret_cast<uintptr_t>(b);
  if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
#endif
    // Step in units matching the uint32_t block loads below.
    const int kStepSize = sizeof(uint32_t) / sizeof(Char);  // NOLINT
    int endpoint = length - kStepSize;
    // Compare blocks until we reach near the end of the string.
    for (; i <= endpoint; i += kStepSize) {
      uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
      uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
      if (wa != wb) {
        return false;
      }
    }
#ifndef V8_HOST_CAN_READ_UNALIGNED
  }
#endif
  // Compare the remaining characters that didn't fit into a block.
  for (; i < length; i++) {
    if (a[i] != b[i]) {
      return false;
    }
  }
  return true;
}
8516
8517
8518template<typename Chars1, typename Chars2>
8519class RawStringComparator : public AllStatic {
8520 public:
8521  static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8522    ASSERT(sizeof(Chars1) != sizeof(Chars2));
8523    for (int i = 0; i < len; i++) {
8524      if (a[i] != b[i]) {
8525        return false;
8526      }
8527    }
8528    return true;
8529  }
8530};
8531
8532
// Same-width two-byte comparison can use the fast blockwise routine.
template<>
class RawStringComparator<uint16_t, uint16_t> {
 public:
  static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};
8540
8541
// Same-width one-byte comparison can use the fast blockwise routine.
template<>
class RawStringComparator<uint8_t, uint8_t> {
 public:
  static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};
8549
8550
// Compares two strings of equal length chunk by chunk, streaming through
// cons strings via a ConsStringIteratorOp per side so neither string needs
// to be flattened.
class StringComparator {
  // Per-string cursor: the current leaf segment (8- or 16-bit buffer plus
  // remaining length) and the iterator used to fetch the next segment.
  class State {
   public:
    explicit inline State(ConsStringIteratorOp* op)
      : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}

    // Positions the cursor at the first segment of |string|.
    inline void Init(String* string, unsigned len) {
      op_->Reset();
      int32_t type = string->map()->instance_type();
      String::Visit(string, 0, *this, *op_, type, len);
    }

    // String::Visit callback for a one-byte segment.
    inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
      is_one_byte_ = true;
      buffer8_ = chars;
      length_ = length;
    }

    // String::Visit callback for a two-byte segment.
    inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
      is_one_byte_ = false;
      buffer16_ = chars;
      length_ = length;
    }

    // Consumes |consumed| characters: either advances within the current
    // segment or fetches the next leaf from the cons string iterator.
    void Advance(unsigned consumed) {
      ASSERT(consumed <= length_);
      // Still in buffer.
      if (length_ != consumed) {
        if (is_one_byte_) {
          buffer8_ += consumed;
        } else {
          buffer16_ += consumed;
        }
        length_ -= consumed;
        return;
      }
      // Advance state.
      ASSERT(op_->HasMore());
      int32_t type = 0;
      unsigned length = 0;
      String* next = op_->ContinueOperation(&type, &length);
      ASSERT(next != NULL);
      ConsStringNullOp null_op;
      String::Visit(next, 0, *this, null_op, type, length);
    }

    ConsStringIteratorOp* const op_;
    bool is_one_byte_;
    unsigned length_;
    // The active buffer pointer; which member is valid is tracked by
    // is_one_byte_.
    union {
      const uint8_t* buffer8_;
      const uint16_t* buffer16_;
    };

   private:
    DISALLOW_IMPLICIT_CONSTRUCTORS(State);
  };

 public:
  inline StringComparator(ConsStringIteratorOp* op_1,
                          ConsStringIteratorOp* op_2)
    : state_1_(op_1),
      state_2_(op_2) {
  }

  // Compares the current segments. Reading buffer8_ and reinterpreting is
  // valid for either width because the buffers share a union.
  template<typename Chars1, typename Chars2>
  static inline bool Equals(State* state_1, State* state_2, unsigned to_check) {
    const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
    const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
    return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
  }

  // Returns whether the two strings (both of the given non-zero length)
  // have identical contents.
  bool Equals(unsigned length, String* string_1, String* string_2) {
    ASSERT(length != 0);
    state_1_.Init(string_1, length);
    state_2_.Init(string_2, length);
    while (true) {
      // Compare only as far as both sides' current segments reach.
      unsigned to_check = Min(state_1_.length_, state_2_.length_);
      ASSERT(to_check > 0 && to_check <= length);
      bool is_equal;
      if (state_1_.is_one_byte_) {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      } else {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      }
      // Looping done.
      if (!is_equal) return false;
      length -= to_check;
      // Exit condition. Strings are equal.
      if (length == 0) return true;
      state_1_.Advance(to_check);
      state_2_.Advance(to_check);
    }
  }

 private:
  State state_1_;
  State state_2_;
  DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
};
8659
8660
// Full content equality check: compares lengths, then hashes (when both are
// already computed), then first characters, then the actual contents.
bool String::SlowEquals(String* other) {
  // Fast check: negative check with lengths.
  int len = length();
  if (len != other->length()) return false;
  if (len == 0) return true;

  // Fast check: if hash code is computed for both strings
  // a fast negative check can be performed.
  if (HasHashCode() && other->HasHashCode()) {
#ifdef DEBUG
    // In slow-assert mode, verify that differing hashes really imply
    // differing contents.
    if (FLAG_enable_slow_asserts) {
      if (Hash() != other->Hash()) {
        bool found_difference = false;
        for (int i = 0; i < len; i++) {
          if (Get(i) != other->Get(i)) {
            found_difference = true;
            break;
          }
        }
        ASSERT(found_difference);
      }
    }
#endif
    if (Hash() != other->Hash()) return false;
  }

  // We know the strings are both non-empty. Compare the first chars
  // before we try to flatten the strings.
  if (this->Get(0) != other->Get(0)) return false;

  String* lhs = this->TryFlattenGetString();
  String* rhs = other->TryFlattenGetString();

  // TODO(dcarney): Compare all types of flat strings with a Visitor.
  if (StringShape(lhs).IsSequentialAscii() &&
      StringShape(rhs).IsSequentialAscii()) {
    const uint8_t* str1 = SeqOneByteString::cast(lhs)->GetChars();
    const uint8_t* str2 = SeqOneByteString::cast(rhs)->GetChars();
    return CompareRawStringContents(str1, str2, len);
  }

  // General case: stream both strings through the shared comparator.
  Isolate* isolate = GetIsolate();
  StringComparator comparator(isolate->objects_string_compare_iterator_a(),
                              isolate->objects_string_compare_iterator_b());

  return comparator.Equals(static_cast<unsigned>(len), lhs, rhs);
}
8708
8709
8710bool String::MarkAsUndetectable() {
8711  if (StringShape(this).IsInternalized()) return false;
8712
8713  Map* map = this->map();
8714  Heap* heap = GetHeap();
8715  if (map == heap->string_map()) {
8716    this->set_map(heap->undetectable_string_map());
8717    return true;
8718  } else if (map == heap->ascii_string_map()) {
8719    this->set_map(heap->undetectable_ascii_string_map());
8720    return true;
8721  }
8722  // Rest cannot be marked as undetectable
8723  return false;
8724}
8725
8726
// Compares this (UTF-16) string with a UTF-8 byte sequence, decoding the
// UTF-8 on the fly. With allow_prefix_match, returns true when |str| decodes
// to a prefix of this string.
bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
  int slen = length();
  // Can't check exact length equality, but we can check bounds.
  int str_len = str.length();
  if (!allow_prefix_match &&
      (str_len < slen ||
          str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
    return false;
  }
  int i;
  unsigned remaining_in_str = static_cast<unsigned>(str_len);
  const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
  for (i = 0; i < slen && remaining_in_str > 0; i++) {
    unsigned cursor = 0;
    uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
    ASSERT(cursor > 0 && cursor <= remaining_in_str);
    if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
      // Supplementary character: must match a surrogate pair in this string.
      if (i > slen - 1) return false;
      if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
      if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
    } else {
      if (Get(i) != r) return false;
    }
    utf8_data += cursor;
    remaining_in_str -= cursor;
  }
  // Equal only if both inputs were consumed (or a prefix was allowed).
  return (allow_prefix_match || i == slen) && remaining_in_str == 0;
}
8755
8756
// Compares this string against a one-byte character vector, using a direct
// memory comparison when the string has flat ascii content and a
// character-by-character fallback otherwise.
bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsAscii()) {
    return CompareChars(content.ToOneByteVector().start(),
                        str.start(), slen) == 0;
  }
  for (int i = 0; i < slen; i++) {
    if (Get(i) != static_cast<uint16_t>(str[i])) return false;
  }
  return true;
}
8771
8772
8773bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
8774  int slen = length();
8775  if (str.length() != slen) return false;
8776  DisallowHeapAllocation no_gc;
8777  FlatContent content = GetFlatContent();
8778  if (content.IsTwoByte()) {
8779    return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
8780  }
8781  for (int i = 0; i < slen; i++) {
8782    if (Get(i) != str[i]) return false;
8783  }
8784  return true;
8785}
8786
8787
// StringHasher that walks a (possibly cons) string segment by segment
// without flattening it, tracking progress in consumed_.
class IteratingStringHasher: public StringHasher {
 public:
  // Computes the hash field for |string| with the given seed.
  static inline uint32_t Hash(String* string, uint32_t seed) {
    const unsigned len = static_cast<unsigned>(string->length());
    IteratingStringHasher hasher(len, seed);
    // Very long strings hash on length alone; no need to visit characters.
    if (hasher.has_trivial_hash()) {
      return hasher.GetHashField();
    }
    int32_t type = string->map()->instance_type();
    ConsStringNullOp null_op;
    String::Visit(string, 0, hasher, null_op, type, len);
    // Flat strings terminate immediately.
    if (hasher.consumed_ == len) {
      ASSERT(!string->IsConsString());
      return hasher.GetHashField();
    }
    ASSERT(string->IsConsString());
    // This is a ConsString, iterate across it.
    ConsStringIteratorOp op;
    unsigned offset = 0;
    unsigned leaf_length = len;
    string = op.Operate(string, &offset, &type, &leaf_length);
    while (true) {
      ASSERT(hasher.consumed_ < len);
      String::Visit(string, 0, hasher, null_op, type, leaf_length);
      if (hasher.consumed_ == len) break;
      string = op.ContinueOperation(&type, &leaf_length);
      // This should be taken care of by the length check.
      ASSERT(string != NULL);
    }
    return hasher.GetHashField();
  }
  // String::Visit callback for a one-byte segment.
  inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
    AddCharacters(chars, static_cast<int>(length));
    consumed_ += length;
  }
  // String::Visit callback for a two-byte segment.
  inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
    AddCharacters(chars, static_cast<int>(length));
    consumed_ += length;
  }

 private:
  inline IteratingStringHasher(int len, uint32_t seed)
    : StringHasher(len, seed),
      consumed_(0) {}
  // Number of characters hashed so far.
  unsigned consumed_;
  DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
};
8836
8837
// Computes this string's hash, stores it in the hash field, and returns the
// hash value (the field shifted past its flag bits).
uint32_t String::ComputeAndSetHash() {
  // Should only be called if hash code has not yet been computed.
  ASSERT(!HasHashCode());

  // Store the hash code in the object.
  uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
  set_hash_field(field);

  // Check the hash code is there.
  ASSERT(HasHashCode());
  uint32_t result = field >> kHashShift;
  ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
  return result;
}
8852
8853
// Parses this string as an unsigned 32-bit array index into *index.
// Returns false for the empty string, over-long strings, non-digits,
// leading zeros (except "0" itself), and values that overflow 32 bits.
bool String::ComputeArrayIndex(uint32_t* index) {
  int length = this->length();
  if (length == 0 || length > kMaxArrayIndexSize) return false;
  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op);
  uint16_t ch = stream.GetNext();

  // If the string begins with a '0' character, it must only consist
  // of it to be a legal array index.
  if (ch == '0') {
    *index = 0;
    return length == 1;
  }

  // Convert string to uint32 array index; character by character.
  int d = ch - '0';
  if (d < 0 || d > 9) return false;
  uint32_t result = d;
  while (stream.HasMore()) {
    d = stream.GetNext() - '0';
    if (d < 0 || d > 9) return false;
    // Check that the new result is below the 32 bit limit.
    // 429496729 == 0xFFFFFFFF / 10, so result * 10 + d can only exceed
    // 0xFFFFFFFF when result is above this bound (or equal to it with d > 5).
    if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
    result = (result * 10) + d;
  }

  *index = result;
  return true;
}
8883
8884
// Converts this string to an array index, using the index cached in the
// hash field for short strings and full parsing otherwise.
bool String::SlowAsArrayIndex(uint32_t* index) {
  if (length() <= kMaxCachedArrayIndexLength) {
    Hash();  // force computation of hash code
    uint32_t field = hash_field();
    if ((field & kIsNotArrayIndexMask) != 0) return false;
    // Isolate the array index from the full hash field.
    *index = (kArrayIndexHashMask & field) >> kHashShift;
    return true;
  } else {
    return ComputeArrayIndex(index);
  }
}
8897
8898
// Shrinks a sequential string in place to |new_length| characters. When the
// string is the most recent allocation in new space the allocation top is
// simply lowered; otherwise the freed tail is covered by a filler object.
Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
  int new_size, old_size;
  int old_length = string->length();
  // Nothing to do if the string is already short enough.
  if (old_length <= new_length) return string;

  if (string->IsSeqOneByteString()) {
    old_size = SeqOneByteString::SizeFor(old_length);
    new_size = SeqOneByteString::SizeFor(new_length);
  } else {
    ASSERT(string->IsSeqTwoByteString());
    old_size = SeqTwoByteString::SizeFor(old_length);
    new_size = SeqTwoByteString::SizeFor(new_length);
  }

  int delta = old_size - new_size;
  string->set_length(new_length);

  Address start_of_string = string->address();
  ASSERT_OBJECT_ALIGNED(start_of_string);
  ASSERT_OBJECT_ALIGNED(start_of_string + new_size);

  Heap* heap = string->GetHeap();
  NewSpace* newspace = heap->new_space();
  if (newspace->Contains(start_of_string) &&
      newspace->top() == start_of_string + old_size) {
    // Last allocated object in new space.  Simply lower allocation top.
    *(newspace->allocation_top_address()) = start_of_string + new_size;
  } else {
    // Sizes are pointer size aligned, so that we can use filler objects
    // that are a multiple of pointer size.
    heap->CreateFillerObjectAt(start_of_string + new_size, delta);
  }
  // Keep incremental-marking live byte counts accurate for marked objects.
  if (Marking::IsBlack(Marking::MarkBitFrom(start_of_string))) {
    MemoryChunk::IncrementLiveBytesFromMutator(start_of_string, -delta);
  }


  if (new_length == 0) return heap->isolate()->factory()->empty_string();
  return string;
}
8939
8940
// Returns the AllocationMemento placed immediately after |object| in new
// space, or NULL if there is none (or allocation-site tracking is off).
AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object) {
  // Currently, AllocationMemento objects are only allocated immediately
  // after JSArrays in NewSpace, and detecting whether a JSArray has one
  // involves carefully checking the object immediately after the JSArray
  // (if there is one) to see if it's an AllocationMemento.
  if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) {
    // Raw (untagged) address just past the object.
    Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
        object->Size();
    if ((ptr_end + AllocationMemento::kSize) <=
        object->GetHeap()->NewSpaceTop()) {
      // There is room in newspace for allocation info. Do we have some?
      Map** possible_allocation_memento_map =
          reinterpret_cast<Map**>(ptr_end);
      if (*possible_allocation_memento_map ==
          object->GetHeap()->allocation_memento_map()) {
        // The + 1 re-applies the heap object tag to the raw address.
        AllocationMemento* memento = AllocationMemento::cast(
            reinterpret_cast<Object*>(ptr_end + 1));
        return memento;
      }
    }
  }
  return NULL;
}
8964
8965
// Builds a hash field encoding the array index |value| and its decimal
// |length| directly, rather than a content hash.
uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
  // For array indexes mix the length into the hash as an array index could
  // be zero.
  ASSERT(length > 0);
  ASSERT(length <= String::kMaxArrayIndexSize);
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));

  value <<= String::kHashShift;
  value |= length << String::kArrayIndexHashLengthShift;

  // The result must still read as an array-index hash field.
  ASSERT((value & String::kIsNotArrayIndexMask) == 0);
  ASSERT((length > String::kMaxCachedArrayIndexLength) ||
         (value & String::kContainsCachedArrayIndexMask) == 0);
  return value;
}
8982
8983
// Produces the final hash field: an array-index encoding, a content hash,
// or (for over-long strings) a hash derived from the length alone.
uint32_t StringHasher::GetHashField() {
  if (length_ <= String::kMaxHashCalcLength) {
    if (is_array_index_) {
      return MakeArrayIndexHash(array_index_, length_);
    }
    return (GetHashCore(raw_running_hash_) << String::kHashShift) |
           String::kIsNotArrayIndexMask;
  } else {
    // Strings longer than kMaxHashCalcLength hash on their length.
    return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
  }
}
8995
8996
// Hashes a UTF-8 byte sequence. Hashing is done in decoded UTF-16 code
// units (emitting surrogate pairs for supplementary characters) so the
// result matches hashing the decoded string; also reports the UTF-16
// length via *utf16_length_out.
uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
                                       uint32_t seed,
                                       int* utf16_length_out) {
  int vector_length = chars.length();
  // Handle some edge cases
  if (vector_length <= 1) {
    ASSERT(vector_length == 0 ||
           static_cast<uint8_t>(chars.start()[0]) <=
               unibrow::Utf8::kMaxOneByteChar);
    *utf16_length_out = vector_length;
    return HashSequentialString(chars.start(), vector_length, seed);
  }
  // Start with a fake length which won't affect computation.
  // It will be updated later.
  StringHasher hasher(String::kMaxArrayIndexSize, seed);
  unsigned remaining = static_cast<unsigned>(vector_length);
  const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
  int utf16_length = 0;
  bool is_index = true;
  ASSERT(hasher.is_array_index_);
  while (remaining > 0) {
    unsigned consumed = 0;
    uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
    ASSERT(consumed > 0 && consumed <= remaining);
    stream += consumed;
    remaining -= consumed;
    bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
    utf16_length += is_two_characters ? 2 : 1;
    // No need to keep hashing. But we do need to calculate utf16_length.
    if (utf16_length > String::kMaxHashCalcLength) continue;
    if (is_two_characters) {
      // Supplementary character: hash its surrogate pair halves.
      uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
      uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
      hasher.AddCharacter(c1);
      hasher.AddCharacter(c2);
      if (is_index) is_index = hasher.UpdateIndex(c1);
      if (is_index) is_index = hasher.UpdateIndex(c2);
    } else {
      hasher.AddCharacter(c);
      if (is_index) is_index = hasher.UpdateIndex(c);
    }
  }
  *utf16_length_out = static_cast<int>(utf16_length);
  // Must set length here so that hash computation is correct.
  hasher.length_ = utf16_length;
  return hasher.GetHashField();
}
9044
9045
9046MaybeObject* String::SubString(int start, int end, PretenureFlag pretenure) {
9047  Heap* heap = GetHeap();
9048  if (start == 0 && end == length()) return this;
9049  MaybeObject* result = heap->AllocateSubString(this, start, end, pretenure);
9050  return result;
9051}
9052
9053
9054void String::PrintOn(FILE* file) {
9055  int length = this->length();
9056  for (int i = 0; i < length; i++) {
9057    PrintF(file, "%c", Get(i));
9058  }
9059}
9060
9061
// Shrinks the enum cache (and the enum indices cache, if one exists) of the
// given descriptor array so that it only covers properties still enumerable
// on |map|.  Called during GC, hence the FROM_GC trimming mode.
static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
  int live_enum = map->EnumLength();
  if (live_enum == Map::kInvalidEnumCache) {
    // No cached enum length on the map; recompute it from the map's own
    // enumerable descriptors.
    live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
  }
  // Nothing enumerable remains: drop the whole cache.
  if (live_enum == 0) return descriptors->ClearEnumCache();

  FixedArray* enum_cache = descriptors->GetEnumCache();

  int to_trim = enum_cache->length() - live_enum;
  if (to_trim <= 0) return;
  RightTrimFixedArray<FROM_GC>(heap, descriptors->GetEnumCache(), to_trim);

  // Keep the enum indices cache (when present) in sync by trimming it by
  // the same number of entries.
  if (!descriptors->HasEnumIndicesCache()) return;
  FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
  RightTrimFixedArray<FROM_GC>(heap, enum_indices_cache, to_trim);
}
9079
9080
// Shrinks |descriptors| so it holds exactly |number_of_own_descriptors|
// entries, releasing the storage of descriptors that belonged to now-dead
// maps.  Also trims the enum cache (if any) and re-sorts the array, since
// trimming may have disturbed the hash-based ordering invariant.
static void TrimDescriptorArray(Heap* heap,
                                Map* map,
                                DescriptorArray* descriptors,
                                int number_of_own_descriptors) {
  int number_of_descriptors = descriptors->number_of_descriptors_storage();
  int to_trim = number_of_descriptors - number_of_own_descriptors;
  if (to_trim == 0) return;

  // Each descriptor occupies kDescriptorSize slots in the backing array.
  RightTrimFixedArray<FROM_GC>(
      heap, descriptors, to_trim * DescriptorArray::kDescriptorSize);
  descriptors->SetNumberOfDescriptors(number_of_own_descriptors);

  if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
  descriptors->Sort();
}
9096
9097
// Clear a possible back pointer in case the transition leads to a dead map.
// Return true in case a back pointer has been cleared and false otherwise.
static bool ClearBackPointer(Heap* heap, Map* target) {
  // A marked (i.e. live) target map keeps its back pointer.
  if (Marking::MarkBitFrom(target).Get()) return false;
  // The target map is dead: sever the link.  No write barrier is needed
  // because undefined is part of the root set.
  target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
  return true;
}
9105
9106
// TODO(mstarzinger): This method should be moved into MarkCompactCollector,
// because it cannot be called from outside the GC and we already have methods
// depending on the transitions layout in the GC anyways.
//
// Removes transitions whose target maps did not survive marking, compacting
// the surviving entries to the front of the transition array and trimming
// the array's tail.  If this map owned a descriptor array that only dead
// transition targets shared, the descriptor array is trimmed as well.
void Map::ClearNonLiveTransitions(Heap* heap) {
  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
  if (!HasTransitionArray()) return;

  TransitionArray* t = transitions();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  // Next free slot in the compacted prefix of live transitions.
  int transition_index = 0;

  DescriptorArray* descriptors = instance_descriptors();
  bool descriptors_owner_died = false;

  // Compact all live descriptors to the left.
  for (int i = 0; i < t->number_of_transitions(); ++i) {
    Map* target = t->GetTarget(i);
    if (ClearBackPointer(heap, target)) {
      // The target is dead.  Remember whether it shared this map's
      // descriptor array, so the array can be trimmed below.
      if (target->instance_descriptors() == descriptors) {
        descriptors_owner_died = true;
      }
    } else {
      // Live transition: move it into the compacted prefix if needed.
      if (i != transition_index) {
        Name* key = t->GetKey(i);
        t->SetKey(transition_index, key);
        Object** key_slot = t->GetKeySlot(transition_index);
        collector->RecordSlot(key_slot, key_slot, key);
        // Target slots do not need to be recorded since maps are not compacted.
        t->SetTarget(transition_index, t->GetTarget(i));
      }
      transition_index++;
    }
  }

  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
  if (transition_index == t->number_of_transitions()) return;

  int number_of_own_descriptors = NumberOfOwnDescriptors();

  if (descriptors_owner_died) {
    if (number_of_own_descriptors > 0) {
      TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
      ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
    } else {
      ASSERT(descriptors == GetHeap()->empty_descriptor_array());
    }
  }

  // Trim the dead tail off the transition array.  A simple transition
  // stores one slot per entry, a full one kTransitionSize slots.
  int trim = t->number_of_transitions() - transition_index;
  if (trim > 0) {
    RightTrimFixedArray<FROM_GC>(heap, t, t->IsSimpleTransition()
        ? trim : trim * TransitionArray::kTransitionSize);
  }
}
9166
9167
int Map::Hash() {
  // For performance reasons we only hash the 3 most variable fields of a map:
  // constructor, prototype and bit_field2.

  // Shift away the tag.
  int hash = (static_cast<uint32_t>(
        reinterpret_cast<uintptr_t>(constructor())) >> 2);

  // XOR-ing the prototype and constructor directly yields too many zero bits
  // when the two pointers are close (which is fairly common).
  // To avoid this we shift the prototype 4 bits relatively to the constructor.
  hash ^= (static_cast<uint32_t>(
        reinterpret_cast<uintptr_t>(prototype())) << 2);

  // Fold the upper bits into the lower ones and mix in bit_field2 so
  // attribute changes perturb the hash.
  return hash ^ (hash >> 16) ^ bit_field2();
}
9184
9185
9186static bool CheckEquivalent(Map* first, Map* second) {
9187  return
9188    first->constructor() == second->constructor() &&
9189    first->prototype() == second->prototype() &&
9190    first->instance_type() == second->instance_type() &&
9191    first->bit_field() == second->bit_field() &&
9192    first->bit_field2() == second->bit_field2() &&
9193    first->is_observed() == second->is_observed() &&
9194    first->function_with_prototype() == second->function_with_prototype();
9195}
9196
9197
// A transition between maps is only valid when the maps agree on all the
// identity fields checked by CheckEquivalent.
bool Map::EquivalentToForTransition(Map* other) {
  return CheckEquivalent(this, other);
}
9201
9202
9203bool Map::EquivalentToForNormalization(Map* other,
9204                                       PropertyNormalizationMode mode) {
9205  int properties = mode == CLEAR_INOBJECT_PROPERTIES
9206      ? 0 : other->inobject_properties();
9207  return CheckEquivalent(this, other) && inobject_properties() == properties;
9208}
9209
9210
// Visits all pointer fields of a JSFunction.  The code entry slot holds a
// raw instruction-start address rather than a tagged pointer, so it is
// reported via VisitCodeEntry instead of the generic pointer visitor.
void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
  // Iterate over all fields in the body but take care in dealing with
  // the code entry.
  IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
  v->VisitCodeEntry(this->address() + kCodeEntryOffset);
  IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
}
9218
9219
// Installs the lazy-recompile builtin so the next invocation triggers an
// optimizing recompilation of this function.
void JSFunction::MarkForLazyRecompilation() {
  ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
  ASSERT(!IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() ||
         code()->optimizable());
  ASSERT(!shared()->is_generator());
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile));
  // No write barrier required, since the builtin is part of the root set.
}
9230
9231
// Installs the parallel-recompile builtin so the next invocation enqueues
// this function for optimization on the background compilation thread.
void JSFunction::MarkForParallelRecompilation() {
  ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
  ASSERT(!IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
  ASSERT(!shared()->is_generator());
  ASSERT(FLAG_parallel_recompilation);
  if (FLAG_trace_parallel_recompilation) {
    PrintF("  ** Marking ");
    PrintName();
    PrintF(" for parallel recompilation.\n");
  }
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kParallelRecompile));
  // No write barrier required, since the builtin is part of the root set.
}
9247
9248
// Installs the install-recompiled-code builtin: background compilation has
// finished and the next invocation will swap in the optimized code.
void JSFunction::MarkForInstallingRecompiledCode() {
  // The debugger could have switched the builtin to lazy compile.
  // In that case, simply carry on.  It will be dealt with later.
  ASSERT(!IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
  ASSERT(FLAG_parallel_recompilation);
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kInstallRecompiledCode));
  // No write barrier required, since the builtin is part of the root set.
}
9259
9260
// Installs the in-recompile-queue builtin, signalling that this function is
// currently waiting in the background compiler's queue.
void JSFunction::MarkInRecompileQueue() {
  // We can only arrive here via the parallel-recompilation builtin.  If
  // break points were set, the code would point to the lazy-compile builtin.
  ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
  ASSERT(IsMarkedForParallelRecompilation() && !IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
  ASSERT(FLAG_parallel_recompilation);
  if (FLAG_trace_parallel_recompilation) {
    PrintF("  ** Queueing ");
    PrintName();
    PrintF(" for parallel recompilation.\n");
  }
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kInRecompileQueue));
  // No write barrier required, since the builtin is part of the root set.
}
9277
9278
9279static bool CompileLazyHelper(CompilationInfo* info,
9280                              ClearExceptionFlag flag) {
9281  // Compile the source information to a code object.
9282  ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
9283  ASSERT(!info->isolate()->has_pending_exception());
9284  bool result = Compiler::CompileLazy(info);
9285  ASSERT(result != Isolate::Current()->has_pending_exception());
9286  if (!result && flag == CLEAR_EXCEPTION) {
9287    info->isolate()->clear_pending_exception();
9288  }
9289  return result;
9290}
9291
9292
9293bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared,
9294                                     ClearExceptionFlag flag) {
9295  ASSERT(shared->allows_lazy_compilation_without_context());
9296  CompilationInfoWithZone info(shared);
9297  return CompileLazyHelper(&info, flag);
9298}
9299
9300
// Handle-based wrapper around the raw AddToOptimizedCodeMap below; retries
// the underlying allocation via CALL_HEAP_FUNCTION_VOID if it fails.
void SharedFunctionInfo::AddToOptimizedCodeMap(
    Handle<SharedFunctionInfo> shared,
    Handle<Context> native_context,
    Handle<Code> code,
    Handle<FixedArray> literals) {
  CALL_HEAP_FUNCTION_VOID(
      shared->GetIsolate(),
      shared->AddToOptimizedCodeMap(*native_context, *code, *literals));
}
9310
9311
// Appends a {native_context, code, literals} triple to this function's
// optimized code map, creating the map if it does not exist yet.  Returns
// the new map on success or a retry-after-GC failure from allocation.
MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
                                                       Code* code,
                                                       FixedArray* literals) {
  ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
  ASSERT(native_context->IsNativeContext());
  STATIC_ASSERT(kEntryLength == 3);
  Heap* heap = GetHeap();
  FixedArray* new_code_map;
  Object* value = optimized_code_map();
  if (value->IsSmi()) {
    // No optimized code map.
    ASSERT_EQ(0, Smi::cast(value)->value());
    // Create 3 entries per context {context, code, literals}.
    MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength);
    if (!maybe->To(&new_code_map)) return maybe;
    new_code_map->set(kEntriesStart + 0, native_context);
    new_code_map->set(kEntriesStart + 1, code);
    new_code_map->set(kEntriesStart + 2, literals);
  } else {
    // Copy old map and append one new entry.
    FixedArray* old_code_map = FixedArray::cast(value);
    ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context));
    int old_length = old_code_map->length();
    int new_length = old_length + kEntryLength;
    MaybeObject* maybe = old_code_map->CopySize(new_length);
    if (!maybe->To(&new_code_map)) return maybe;
    new_code_map->set(old_length + 0, native_context);
    new_code_map->set(old_length + 1, code);
    new_code_map->set(old_length + 2, literals);
    // Zap the old map for the sake of the heap verifier.
    if (Heap::ShouldZapGarbage()) {
      Object** data = old_code_map->data_start();
      MemsetPointer(data, heap->the_hole_value(), old_length);
    }
  }
#ifdef DEBUG
  // Verify the layout invariant: each entry is a native context followed by
  // optimized code and a literals array.
  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
    ASSERT(new_code_map->get(i)->IsNativeContext());
    ASSERT(new_code_map->get(i + 1)->IsCode());
    ASSERT(Code::cast(new_code_map->get(i + 1))->kind() ==
           Code::OPTIMIZED_FUNCTION);
    ASSERT(new_code_map->get(i + 2)->IsFixedArray());
  }
#endif
  set_optimized_code_map(new_code_map);
  return new_code_map;
}
9359
9360
// Installs the cached optimized code (and, for unbound functions, the
// cached literals) found at |index| of the optimized code map onto
// |function|.  |index| points at the code slot; the context precedes it
// and the literals follow it within the same map entry.
void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function,
                                                     int index) {
  ASSERT(index > kEntriesStart);
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  if (!bound()) {
    // Bound functions share the literals of their target, so literals are
    // only installed for unbound functions.
    FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
    ASSERT(cached_literals != NULL);
    function->set_literals(cached_literals);
  }
  Code* code = Code::cast(code_map->get(index));
  ASSERT(code != NULL);
  ASSERT(function->context()->native_context() == code_map->get(index - 1));
  function->ReplaceCode(code);
}
9375
9376
// Drops the entire optimized code map, resetting the field to the empty
// sentinel (Smi zero).  Also evicts this function from the code flusher if
// it had been enqueued there.
void SharedFunctionInfo::ClearOptimizedCodeMap() {
  FixedArray* code_map = FixedArray::cast(optimized_code_map());

  // If the next map link slot is already used then the function was
  // enqueued with code flushing and we remove it now.
  if (!code_map->get(kNextMapIndex)->IsUndefined()) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictOptimizedCodeMap(this);
  }

  ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
  set_optimized_code_map(Smi::FromInt(0));
}
9390
9391
// Removes the entry holding |optimized_code| (if present) from the
// optimized code map, compacting the remaining entries and trimming the
// array.  |reason| is used only for --trace-opt output.
void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
                                                   const char* reason) {
  // An Smi value means there is no optimized code map at all.
  if (optimized_code_map()->IsSmi()) return;

  int i;
  bool removed_entry = false;
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  for (i = kEntriesStart; i < code_map->length(); i += kEntryLength) {
    ASSERT(code_map->get(i)->IsNativeContext());
    if (Code::cast(code_map->get(i + 1)) == optimized_code) {
      if (FLAG_trace_opt) {
        PrintF("[evicting entry from optimizing code map (%s) for ", reason);
        ShortPrint();
        PrintF("]\n");
      }
      removed_entry = true;
      break;
    }
  }
  // Shift later entries one slot group to the left, overwriting the
  // removed entry.  When nothing was found, i equals the map length and
  // this loop does not execute.
  while (i < (code_map->length() - kEntryLength)) {
    code_map->set(i, code_map->get(i + kEntryLength));
    code_map->set(i + 1, code_map->get(i + 1 + kEntryLength));
    code_map->set(i + 2, code_map->get(i + 2 + kEntryLength));
    i += kEntryLength;
  }
  if (removed_entry) {
    // Always trim even when array is cleared because of heap verifier.
    RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength);
    if (code_map->length() == kEntriesStart) {
      ClearOptimizedCodeMap();
    }
  }
}
9425
9426
// Trims |shrink_by| slots (a whole number of entries) off the tail of the
// optimized code map.  Called from the GC, hence FROM_GC trimming.
void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  ASSERT(shrink_by % kEntryLength == 0);
  ASSERT(shrink_by <= code_map->length() - kEntriesStart);
  // Always trim even when array is cleared because of heap verifier.
  RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by);
  if (code_map->length() == kEntriesStart) {
    ClearOptimizedCodeMap();
  }
}
9437
9438
9439bool JSFunction::CompileLazy(Handle<JSFunction> function,
9440                             ClearExceptionFlag flag) {
9441  bool result = true;
9442  if (function->shared()->is_compiled()) {
9443    function->ReplaceCode(function->shared()->code());
9444  } else {
9445    ASSERT(function->shared()->allows_lazy_compilation());
9446    CompilationInfoWithZone info(function);
9447    result = CompileLazyHelper(&info, flag);
9448    ASSERT(!result || function->is_compiled());
9449  }
9450  return result;
9451}
9452
9453
9454bool JSFunction::CompileOptimized(Handle<JSFunction> function,
9455                                  BailoutId osr_ast_id,
9456                                  ClearExceptionFlag flag) {
9457  CompilationInfoWithZone info(function);
9458  info.SetOptimizing(osr_ast_id);
9459  return CompileLazyHelper(&info, flag);
9460}
9461
9462
9463bool JSFunction::EnsureCompiled(Handle<JSFunction> function,
9464                                ClearExceptionFlag flag) {
9465  return function->is_compiled() || CompileLazy(function, flag);
9466}
9467
9468
9469bool JSFunction::IsInlineable() {
9470  if (IsBuiltin()) return false;
9471  SharedFunctionInfo* shared_info = shared();
9472  // Check that the function has a script associated with it.
9473  if (!shared_info->script()->IsScript()) return false;
9474  if (shared_info->optimization_disabled()) return false;
9475  Code* code = shared_info->code();
9476  if (code->kind() == Code::OPTIMIZED_FUNCTION) return true;
9477  // If we never ran this (unlikely) then lets try to optimize it.
9478  if (code->kind() != Code::FUNCTION) return true;
9479  return code->optimizable();
9480}
9481
9482
// Handle-based wrapper for the raw OptimizeAsPrototype below; retries the
// underlying allocation via CALL_HEAP_FUNCTION_VOID on failure.
void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
  CALL_HEAP_FUNCTION_VOID(object->GetIsolate(), object->OptimizeAsPrototype());
}
9486
9487
9488MaybeObject* JSObject::OptimizeAsPrototype() {
9489  if (IsGlobalObject()) return this;
9490
9491  // Make sure prototypes are fast objects and their maps have the bit set
9492  // so they remain fast.
9493  if (!HasFastProperties()) {
9494    MaybeObject* new_proto = TransformToFastProperties(0);
9495    if (new_proto->IsFailure()) return new_proto;
9496    ASSERT(new_proto == this);
9497  }
9498  return this;
9499}
9500
9501
// Builds the per-native-context cache of initial JSArray maps: one map per
// elements kind, each reachable from |initial_map| through elements-kind
// transitions (creating missing transitions as needed).  Returns
// |initial_map| on success or an allocation failure.
static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
    Context* native_context, Map* initial_map) {
  // Replace all of the cached initial array maps in the native context with
  // the appropriate transitioned elements kind maps.
  Heap* heap = native_context->GetHeap();
  MaybeObject* maybe_maps =
      heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED);
  FixedArray* maps;
  if (!maybe_maps->To(&maps)) return maybe_maps;

  Map* current_map = initial_map;
  ElementsKind kind = current_map->elements_kind();
  ASSERT(kind == GetInitialFastElementsKind());
  maps->set(kind, current_map);
  // Walk the fast elements kinds in transition order, reusing existing
  // transition maps where possible and creating new ones otherwise.
  for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
       i < kFastElementsKindCount; ++i) {
    Map* new_map;
    ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
    if (current_map->HasElementsTransition()) {
      new_map = current_map->elements_transition_map();
      ASSERT(new_map->elements_kind() == next_kind);
    } else {
      MaybeObject* maybe_new_map =
          current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
      if (!maybe_new_map->To(&new_map)) return maybe_new_map;
    }
    maps->set(next_kind, new_map);
    current_map = new_map;
  }
  native_context->set_js_array_maps(maps);
  return initial_map;
}
9534
9535
// Handle-based wrapper for the raw CacheInitialJSArrayMaps above; retries
// the underlying allocations via CALL_HEAP_FUNCTION on failure.
Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context,
                                       Handle<Map> initial_map) {
  CALL_HEAP_FUNCTION(native_context->GetIsolate(),
                     CacheInitialJSArrayMaps(*native_context, *initial_map),
                     Object);
}
9542
9543
// Sets the prototype that instances created by |function| will receive.
// Optimizes the new prototype object, updates (or lazily records) the
// function's initial map, refreshes the native context's cached JSArray
// maps when |function| is the global Array function, and finally clears
// the instanceof cache which may now hold stale results.
void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
                                      Handle<Object> value) {
  ASSERT(value->IsJSReceiver());

  // First some logic for the map of the prototype to make sure it is in fast
  // mode.
  if (value->IsJSObject()) {
    JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
  }

  // Now some logic for the maps of the objects that are created by using this
  // function as a constructor.
  if (function->has_initial_map()) {
    // If the function has allocated the initial map replace it with a
    // copy containing the new prototype.  Also complete any in-object
    // slack tracking that is in progress at this point because it is
    // still tracking the old copy.
    if (function->shared()->IsInobjectSlackTrackingInProgress()) {
      function->shared()->CompleteInobjectSlackTracking();
    }
    Handle<Map> new_map = Map::Copy(handle(function->initial_map()));
    new_map->set_prototype(*value);

    // If the function is used as the global Array function, cache the
    // initial map (and transitioned versions) in the native context.
    Context* native_context = function->context()->native_context();
    Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
    if (array_function->IsJSFunction() &&
        *function == JSFunction::cast(array_function)) {
      CacheInitialJSArrayMaps(handle(native_context), new_map);
    }

    function->set_initial_map(*new_map);
  } else {
    // Put the value in the initial map field until an initial map is
    // needed.  At that point, a new initial map is created and the
    // prototype is put into the initial map where it belongs.
    function->set_prototype_or_initial_map(*value);
  }
  // Cached instanceof answers may depend on the old prototype chain.
  function->GetHeap()->ClearInstanceofCache();
}
9585
9586
// Sets the "prototype" property of |function|.  Non-JSReceiver values are
// stashed in a fresh map's constructor field (per ES5 semantics) and the
// actual construction prototype falls back to the initial object prototype.
void JSFunction::SetPrototype(Handle<JSFunction> function,
                              Handle<Object> value) {
  ASSERT(function->should_have_prototype());
  Handle<Object> construct_prototype = value;

  // If the value is not a JSReceiver, store the value in the map's
  // constructor field so it can be accessed.  Also, set the prototype
  // used for constructing objects to the original object prototype.
  // See ECMA-262 13.2.2.
  if (!value->IsJSReceiver()) {
    // Copy the map so this does not affect unrelated functions.
    // Remove map transitions because they point to maps with a
    // different prototype.
    Handle<Map> new_map = Map::Copy(handle(function->map()));

    function->set_map(*new_map);
    new_map->set_constructor(*value);
    new_map->set_non_instance_prototype(true);
    Isolate* isolate = new_map->GetIsolate();
    construct_prototype = handle(
        isolate->context()->native_context()->initial_object_prototype(),
        isolate);
  } else {
    function->map()->set_non_instance_prototype(false);
  }

  // Note: returning the result of a void call is valid; both functions
  // return void.
  return SetInstancePrototype(function, construct_prototype);
}
9615
9616
// Switches this function to a map without a "prototype" property (used for
// functions that must not be constructible, e.g. accessors), clearing the
// prototype-or-initial-map slot to the hole.
void JSFunction::RemovePrototype() {
  Context* native_context = context()->native_context();
  // Pick the prototype-less map matching the function's language mode.
  Map* no_prototype_map = shared()->is_classic_mode()
      ? native_context->function_without_prototype_map()
      : native_context->strict_mode_function_without_prototype_map();

  // Already using the prototype-less map: nothing to do.
  if (map() == no_prototype_map) return;

  ASSERT(map() == (shared()->is_classic_mode()
                   ? native_context->function_map()
                   : native_context->strict_mode_function_map()));

  set_map(no_prototype_map);
  set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
}
9632
9633
// Records the class name used by Object.prototype.toString for instances
// created by this function; forwards to the shared function info.
void JSFunction::SetInstanceClassName(String* name) {
  shared()->set_instance_class_name(name);
}
9637
9638
9639void JSFunction::PrintName(FILE* out) {
9640  SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9641  PrintF(out, "%s", *name);
9642}
9643
9644
// Extracts the native context stored in a function's literals array at the
// fixed kLiteralNativeContextIndex slot.
Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
  return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
}
9648
9649
// Decides whether this function matches --hydrogen-filter and should be
// considered by the optimizing (Hydrogen) compiler.
bool JSFunction::PassesHydrogenFilter() {
  String* name = shared()->DebugName();
  // The filter string is a pattern that matches functions in this way:
  //   "*"      all; the default
  //   "-"      all but the top-level function
  //   "-name"  all but the function "name"
  //   ""       only the top-level function
  //   "name"   only the function "name"
  //   "name*"  only functions starting with "name"
  if (*FLAG_hydrogen_filter != '*') {
    Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
    // Empty filter: only the (unnamed) top-level function matches.
    if (filter.length() == 0) return name->length() == 0;
    // Exact-match rule (only when the filter is not an exclusion pattern).
    if (filter[0] != '-' && name->IsUtf8EqualTo(filter)) return true;
    // Exclusion rule: "-name" matches everything except "name".
    if (filter[0] == '-' &&
        !name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
      return true;
    }
    // Prefix rule: "name*" matches names starting with "name".
    if (filter[filter.length() - 1] == '*' &&
        name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
      return true;
    }
    return false;
  }

  return true;
}
9676
9677
// Initializes an oddball (undefined, null, true, ...) with its string
// representation, numeric conversion value and kind tag.  Returns the
// oddball itself, or an allocation failure from string internalization.
// NOTE(review): uses the TLS-based Isolate::Current() — presumably because
// this runs during bootstrapping; confirm before changing to GetIsolate().
MaybeObject* Oddball::Initialize(const char* to_string,
                                 Object* to_number,
                                 byte kind) {
  String* internalized_to_string;
  { MaybeObject* maybe_string =
      Isolate::Current()->heap()->InternalizeUtf8String(
          CStrVector(to_string));
    if (!maybe_string->To(&internalized_to_string)) return maybe_string;
  }
  set_to_string(internalized_to_string);
  set_to_number(to_number);
  set_kind(kind);
  return this;
}
9692
9693
9694String* SharedFunctionInfo::DebugName() {
9695  Object* n = name();
9696  if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
9697  return String::cast(n);
9698}
9699
9700
// True when this function has an associated script whose source is
// available.  Uses reinterpret_cast rather than Script::cast to avoid
// assertion checks (this can be called in contexts where they'd be noisy).
bool SharedFunctionInfo::HasSourceCode() {
  return !script()->IsUndefined() &&
         !reinterpret_cast<Script*>(script())->source()->IsUndefined();
}
9705
9706
// Returns this function's source text as a string handle, or undefined
// when no source is available.
Handle<Object> SharedFunctionInfo::GetSourceCode() {
  if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
  Handle<String> source(String::cast(Script::cast(script())->source()));
  // The function's source is the [start_position, end_position) slice of
  // the script source.
  return SubString(source, start_position(), end_position());
}
9712
9713
// Length in characters of this function's source text within its script.
int SharedFunctionInfo::SourceSize() {
  return end_position() - start_position();
}
9717
9718
9719int SharedFunctionInfo::CalculateInstanceSize() {
9720  int instance_size =
9721      JSObject::kHeaderSize +
9722      expected_nof_properties() * kPointerSize;
9723  if (instance_size > JSObject::kMaxInstanceSize) {
9724    instance_size = JSObject::kMaxInstanceSize;
9725  }
9726  return instance_size;
9727}
9728
9729
// Number of in-object property slots that fit in the calculated instance
// size after the object header.
int SharedFunctionInfo::CalculateInObjectProperties() {
  return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
}
9733
9734
// Support function for printing the source code to a StringStream
// without any allocation in the heap.
//
// Prints up to |max_length| characters of this function's source (all of
// it when |max_length| is negative), preceded by "function <name>" for
// non-toplevel functions; truncated output is suffixed with "...".
void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator,
                                         int max_length) {
  // For some native functions there is no source.
  if (!HasSourceCode()) {
    accumulator->Add("<No Source>");
    return;
  }

  // Get the source for the script which this function came from.
  // Don't use String::cast because we don't want more assertion errors while
  // we are already creating a stack dump.
  String* script_source =
      reinterpret_cast<String*>(Script::cast(script())->source());

  if (!script_source->LooksValid()) {
    accumulator->Add("<Invalid Source>");
    return;
  }

  if (!is_toplevel()) {
    accumulator->Add("function ");
    Object* name = this->name();
    // Anonymous functions get no name printed after the keyword.
    if (name->IsString() && String::cast(name)->length() > 0) {
      accumulator->PrintName(name);
    }
  }

  int len = end_position() - start_position();
  if (len <= max_length || max_length < 0) {
    accumulator->Put(script_source, start_position(), end_position());
  } else {
    accumulator->Put(script_source,
                     start_position(),
                     start_position() + max_length);
    accumulator->Add("...\n");
  }
}
9774
9775
9776static bool IsCodeEquivalent(Code* code, Code* recompiled) {
9777  if (code->instruction_size() != recompiled->instruction_size()) return false;
9778  ByteArray* code_relocation = code->relocation_info();
9779  ByteArray* recompiled_relocation = recompiled->relocation_info();
9780  int length = code_relocation->length();
9781  if (length != recompiled_relocation->length()) return false;
9782  int compare = memcmp(code_relocation->GetDataStartAddress(),
9783                       recompiled_relocation->GetDataStartAddress(),
9784                       length);
9785  return compare == 0;
9786}
9787
9788
// Gives the current unoptimized code deoptimization support by copying the
// deoptimization data from |recompiled| when the two code objects are
// equivalent, or by replacing the code entirely otherwise.
void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
  ASSERT(!has_deoptimization_support());
  DisallowHeapAllocation no_allocation;
  Code* code = this->code();
  if (IsCodeEquivalent(code, recompiled)) {
    // Copy the deoptimization data from the recompiled code.
    code->set_deoptimization_data(recompiled->deoptimization_data());
    code->set_has_deoptimization_support(true);
  } else {
    // TODO(3025757): In case the recompiled isn't equivalent to the
    // old code, we have to replace it. We should try to avoid this
    // altogether because it flushes valuable type feedback by
    // effectively resetting all IC state.
    ReplaceCode(recompiled);
  }
  ASSERT(has_deoptimization_support());
}
9806
9807
// Permanently disables optimization for this function, recording the
// bailout |reason| in --trace-opt output.
void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
  // Disable optimization for the shared function info and mark the
  // code as non-optimizable. The marker on the shared function info
  // is there because we flush non-optimized code thereby losing the
  // non-optimizable information for the code. When the code is
  // regenerated and set on the shared function info it is marked as
  // non-optimizable if optimization is disabled for the shared
  // function info.
  set_optimization_disabled(true);
  // Code should be the lazy compilation stub or else unoptimized.  If the
  // latter, disable optimization for the code too.
  ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
  if (code()->kind() == Code::FUNCTION) {
    code()->set_optimizable(false);
  }
  if (FLAG_trace_opt) {
    PrintF("[disabled optimization for ");
    ShortPrint();
    PrintF(", reason: %s]\n", GetBailoutReason(reason));
  }
}
9829
9830
// Debug-mode check that |id| has a corresponding entry in the unoptimized
// code's deoptimization output data.  GetOutputInfo asserts internally;
// reaching the return statement means the lookup succeeded.
bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
  ASSERT(!id.IsNone());
  Code* unoptimized = code();
  DeoptimizationOutputData* data =
      DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
  unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
  USE(ignore);
  return true;  // Return true if there was no ASSERT.
}
9840
9841
// Begins in-object slack tracking for instances built from |map|: swaps the
// construct stub for the countdown variant, which counts down allocations
// so unused in-object property slots can later be reclaimed.
void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
  ASSERT(!IsInobjectSlackTrackingInProgress());

  if (!FLAG_clever_optimizations) return;

  // Only initiate the tracking the first time.
  if (live_objects_may_exist()) return;
  set_live_objects_may_exist(true);

  // No tracking during the snapshot construction phase.
  if (Serializer::enabled()) return;

  // Without unused property fields there is no slack to reclaim.
  if (map->unused_property_fields() == 0) return;

  // Nonzero counter is a leftover from the previous attempt interrupted
  // by GC, keep it.
  if (construction_count() == 0) {
    set_construction_count(kGenerousAllocationCount);
  }
  set_initial_map(map);
  Builtins* builtins = map->GetHeap()->isolate()->builtins();
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
            construct_stub());
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
}
9867
9868
// Called from GC, hence reinterpret_cast and unchecked accessors.
// Temporarily severs the link from this shared function info to its
// initial map for the duration of the collection.
void SharedFunctionInfo::DetachInitialMap() {
  Map* map = reinterpret_cast<Map*>(initial_map());

  // Make the map remember to restore the link if it survives the GC.
  map->set_bit_field2(
      map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));

  // Undo state changes made by StartInobjectTracking (except the
  // construction_count). This way if the initial map does not survive the GC
  // then StartInobjectTracking will be called again the next time the
  // constructor is called. The countdown will continue and (possibly after
  // several more GCs) CompleteInobjectSlackTracking will eventually be called.
  Heap* heap = map->GetHeap();
  set_initial_map(heap->undefined_value());
  Builtins* builtins = heap->isolate()->builtins();
  // RawField reads the construct stub without a checked cast (GC-safe).
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
            *RawField(this, kConstructStubOffset));
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
  // It is safe to clear the flag: it will be set again if the map is live.
  set_live_objects_may_exist(false);
}
9891
9892
// Called from GC, hence reinterpret_cast and unchecked accessors.
// Restores the link severed by DetachInitialMap once the map is known to
// have survived the collection.
void SharedFunctionInfo::AttachInitialMap(Map* map) {
  // Clear the "restore me" bit set by DetachInitialMap.
  map->set_bit_field2(
      map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));

  // Resume inobject slack tracking.
  set_initial_map(map);
  Builtins* builtins = map->GetHeap()->isolate()->builtins();
  // RawField reads the construct stub without a checked cast (GC-safe).
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
            *RawField(this, kConstructStubOffset));
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
  // The map survived the gc, so there may be objects referencing it.
  set_live_objects_may_exist(true);
}
9907
9908
9909void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
9910  code()->ClearInlineCaches();
9911  set_ic_age(new_ic_age);
9912  if (code()->kind() == Code::FUNCTION) {
9913    code()->set_profiler_ticks(0);
9914    if (optimization_disabled() &&
9915        opt_count() >= FLAG_max_opt_count) {
9916      // Re-enable optimizations if they were disabled due to opt_count limit.
9917      set_optimization_disabled(false);
9918      code()->set_optimizable(true);
9919    }
9920    set_opt_count(0);
9921    set_deopt_count(0);
9922  }
9923}
9924
9925
9926static void GetMinInobjectSlack(Map* map, void* data) {
9927  int slack = map->unused_property_fields();
9928  if (*reinterpret_cast<int*>(data) > slack) {
9929    *reinterpret_cast<int*>(data) = slack;
9930  }
9931}
9932
9933
// Transition-tree visitor: removes |*data| unused in-object property slots
// from the map, shrinking the instance size accordingly.
static void ShrinkInstanceSize(Map* map, void* data) {
  int slack = *reinterpret_cast<int*>(data);
  map->set_inobject_properties(map->inobject_properties() - slack);
  map->set_unused_property_fields(map->unused_property_fields() - slack);
  map->set_instance_size(map->instance_size() - slack * kPointerSize);

  // Visitor id might depend on the instance size, recalculate it.
  map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
}
9943
9944
// Ends in-object slack tracking: determines the minimum slack left over
// across the initial map's whole transition tree, shrinks those maps by
// that amount, and restores the generic construct stub.
void SharedFunctionInfo::CompleteInobjectSlackTracking() {
  ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
  Map* map = Map::cast(initial_map());

  // Tracking is over: drop the initial-map link and swap the countdown
  // construct stub back for the generic one.
  Heap* heap = map->GetHeap();
  set_initial_map(heap->undefined_value());
  Builtins* builtins = heap->isolate()->builtins();
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
            construct_stub());
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));

  // Seed with this map's own slack, then take the minimum over the tree.
  int slack = map->unused_property_fields();
  map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
  if (slack != 0) {
    // Resize the initial map and all maps in its transition tree.
    map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);

    // Give the correct expected_nof_properties to initial maps created later.
    ASSERT(expected_nof_properties() >= slack);
    set_expected_nof_properties(expected_nof_properties() - slack);
  }
}
9967
9968
9969int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) {
9970  ASSERT(native_context->IsNativeContext());
9971  if (!FLAG_cache_optimized_code) return -1;
9972  Object* value = optimized_code_map();
9973  if (!value->IsSmi()) {
9974    FixedArray* optimized_code_map = FixedArray::cast(value);
9975    int length = optimized_code_map->length();
9976    for (int i = kEntriesStart; i < length; i += kEntryLength) {
9977      if (optimized_code_map->get(i) == native_context) {
9978        return i + 1;
9979      }
9980    }
9981    if (FLAG_trace_opt) {
9982      PrintF("[didn't find optimized code in optimized code map for ");
9983      ShortPrint();
9984      PrintF("]\n");
9985    }
9986  }
9987  return -1;
9988}
9989
9990
// Table of visitor synchronization tags, generated from the second column
// of VISITOR_SYNCHRONIZATION_TAGS_LIST.
#define DECLARE_TAG(ignore1, name, ignore2) name,
const char* const VisitorSynchronization::kTags[
    VisitorSynchronization::kNumberOfSyncTags] = {
  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
};
#undef DECLARE_TAG
9997
9998
// Table of visitor synchronization tag names, generated from the third
// column of VISITOR_SYNCHRONIZATION_TAGS_LIST.
#define DECLARE_TAG(ignore1, ignore2, name) name,
const char* const VisitorSynchronization::kTagNames[
    VisitorSynchronization::kNumberOfSyncTags] = {
  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
};
#undef DECLARE_TAG
10005
10006
10007void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10008  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
10009  Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10010  Object* old_target = target;
10011  VisitPointer(&target);
10012  CHECK_EQ(target, old_target);  // VisitPointer doesn't change Code* *target.
10013}
10014
10015
10016void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10017  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10018  Object* stub = rinfo->code_age_stub();
10019  if (stub) {
10020    VisitPointer(&stub);
10021  }
10022}
10023
10024
10025void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10026  Object* code = Code::GetObjectFromEntryAddress(entry_address);
10027  Object* old_code = code;
10028  VisitPointer(&code);
10029  if (code != old_code) {
10030    Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
10031  }
10032}
10033
10034
10035void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10036  ASSERT(rinfo->rmode() == RelocInfo::CELL);
10037  Object* cell = rinfo->target_cell();
10038  Object* old_cell = cell;
10039  VisitPointer(&cell);
10040  if (cell != old_cell) {
10041    rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
10042  }
10043}
10044
10045
10046void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10047  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10048          rinfo->IsPatchedReturnSequence()) ||
10049         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10050          rinfo->IsPatchedDebugBreakSlotSequence()));
10051  Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10052  Object* old_target = target;
10053  VisitPointer(&target);
10054  CHECK_EQ(target, old_target);  // VisitPointer doesn't change Code* *target.
10055}
10056
10057
10058void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10059  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10060  VisitPointer(rinfo->target_object_address());
10061}
10062
10063
10064void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10065  Address* p = rinfo->target_reference_address();
10066  VisitExternalReferences(p, p + 1);
10067}
10068
10069
// Drops this code object's relocation info by replacing it with an empty
// byte array, so subsequent reloc iteration sees no entries.
void Code::InvalidateRelocation() {
  set_relocation_info(GetHeap()->empty_byte_array());
}
10073
10074
10075void Code::Relocate(intptr_t delta) {
10076  for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10077    it.rinfo()->apply(delta);
10078  }
10079  CPU::FlushICache(instruction_start(), instruction_size());
10080}
10081
10082
// Copies freshly assembled code from |desc| into this code object,
// rewriting relocation targets (embedded handles, cells, code targets,
// runtime entries) for the new location.
void Code::CopyFrom(const CodeDesc& desc) {
  ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);

  // Copy the machine code into this object's instruction area.
  CopyBytes(instruction_start(), desc.buffer,
            static_cast<size_t>(desc.instr_size));

  // Copy the relocation info, which the assembler wrote at the end of the
  // buffer growing downwards.
  CopyBytes(relocation_start(),
            desc.buffer + desc.buffer_size - desc.reloc_size,
            static_cast<size_t>(desc.reloc_size));

  // Unbox handles and relocate by the displacement from the assembly
  // buffer to this object's instructions.
  intptr_t delta = instruction_start() - desc.buffer;
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kApplyMask;
  // Needed to find target_object and runtime_entry on X64
  Assembler* origin = desc.origin;
  AllowDeferredHandleDereference embedding_raw_address;
  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::EMBEDDED_OBJECT) {
      // Replace the handle with the object it points to.
      Handle<Object> p = it.rinfo()->target_object_handle(origin);
      it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER);
    } else if (mode == RelocInfo::CELL) {
      Handle<Cell> cell  = it.rinfo()->target_cell_handle();
      it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER);
    } else if (RelocInfo::IsCodeTarget(mode)) {
      // rewrite code handles in inline cache targets to direct
      // pointers to the first instruction in the code object
      Handle<Object> p = it.rinfo()->target_object_handle(origin);
      Code* code = Code::cast(*p);
      it.rinfo()->set_target_address(code->instruction_start(),
                                     SKIP_WRITE_BARRIER);
    } else if (RelocInfo::IsRuntimeEntry(mode)) {
      Address p = it.rinfo()->target_runtime_entry(origin);
      it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER);
    } else {
      // Everything else in kApplyMask is pc-relative; just shift it.
      it.rinfo()->apply(delta);
    }
  }
  CPU::FlushICache(instruction_start(), instruction_size());
}
10129
10130
// Locate the source position which is closest to the address in the code. This
// is using the source position information embedded in the relocation info.
// The position returned is relative to the beginning of the script where the
// source for this function is found.
int Code::SourcePosition(Address pc) {
  int distance = kMaxInt;
  int position = RelocInfo::kNoPosition;  // Initially no position found.
  // Run through all the relocation info to find the best matching source
  // position. All the code needs to be considered as the sequence of the
  // instructions in the code does not necessarily follow the same order as the
  // source.
  RelocIterator it(this, RelocInfo::kPositionMask);
  while (!it.done()) {
    // Only consider positions recorded at addresses before the given pc.
    if (it.rinfo()->pc() < pc) {
      // Get position and distance.

      int dist = static_cast<int>(pc - it.rinfo()->pc());
      int pos = static_cast<int>(it.rinfo()->data());
      // If this position is closer than the current candidate or if it has the
      // same distance as the current candidate and the position is higher then
      // this position is the new candidate.
      if ((dist < distance) ||
          (dist == distance && pos > position)) {
        position = pos;
        distance = dist;
      }
    }
    it.next();
  }
  return position;
}
10163
10164
10165// Same as Code::SourcePosition above except it only looks for statement
10166// positions.
10167int Code::SourceStatementPosition(Address pc) {
10168  // First find the position as close as possible using all position
10169  // information.
10170  int position = SourcePosition(pc);
10171  // Now find the closest statement position before the position.
10172  int statement_position = 0;
10173  RelocIterator it(this, RelocInfo::kPositionMask);
10174  while (!it.done()) {
10175    if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10176      int p = static_cast<int>(it.rinfo()->data());
10177      if (statement_position < p && p <= position) {
10178        statement_position = p;
10179      }
10180    }
10181    it.next();
10182  }
10183  return statement_position;
10184}
10185
10186
10187SafepointEntry Code::GetSafepointEntry(Address pc) {
10188  SafepointTable table(this);
10189  return table.FindEntry(pc);
10190}
10191
10192
10193Object* Code::FindNthObject(int n, Map* match_map) {
10194  ASSERT(is_inline_cache_stub());
10195  DisallowHeapAllocation no_allocation;
10196  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10197  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10198    RelocInfo* info = it.rinfo();
10199    Object* object = info->target_object();
10200    if (object->IsHeapObject()) {
10201      if (HeapObject::cast(object)->map() == match_map) {
10202        if (--n == 0) return object;
10203      }
10204    }
10205  }
10206  return NULL;
10207}
10208
10209
10210Map* Code::FindFirstMap() {
10211  Object* result = FindNthObject(1, GetHeap()->meta_map());
10212  return (result != NULL) ? Map::cast(result) : NULL;
10213}
10214
10215
10216void Code::ReplaceNthObject(int n,
10217                            Map* match_map,
10218                            Object* replace_with) {
10219  ASSERT(is_inline_cache_stub());
10220  DisallowHeapAllocation no_allocation;
10221  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10222  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10223    RelocInfo* info = it.rinfo();
10224    Object* object = info->target_object();
10225    if (object->IsHeapObject()) {
10226      if (HeapObject::cast(object)->map() == match_map) {
10227        if (--n == 0) {
10228          info->set_target_object(replace_with);
10229          return;
10230        }
10231      }
10232    }
10233  }
10234  UNREACHABLE();
10235}
10236
10237
10238void Code::FindAllMaps(MapHandleList* maps) {
10239  ASSERT(is_inline_cache_stub());
10240  DisallowHeapAllocation no_allocation;
10241  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10242  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10243    RelocInfo* info = it.rinfo();
10244    Object* object = info->target_object();
10245    if (object->IsMap()) maps->Add(Handle<Map>(Map::cast(object)));
10246  }
10247}
10248
10249
// Replaces the first embedded map (an object whose map is the meta map).
void Code::ReplaceFirstMap(Map* replace_with) {
  ReplaceNthObject(1, GetHeap()->meta_map(), replace_with);
}
10253
10254
10255Code* Code::FindFirstCode() {
10256  ASSERT(is_inline_cache_stub());
10257  DisallowHeapAllocation no_allocation;
10258  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10259  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10260    RelocInfo* info = it.rinfo();
10261    return Code::GetCodeFromTargetAddress(info->target_address());
10262  }
10263  return NULL;
10264}
10265
10266
10267void Code::FindAllCode(CodeHandleList* code_list, int length) {
10268  ASSERT(is_inline_cache_stub());
10269  DisallowHeapAllocation no_allocation;
10270  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10271  int i = 0;
10272  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10273    if (i++ == length) return;
10274    RelocInfo* info = it.rinfo();
10275    Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10276    ASSERT(code->kind() == Code::STUB);
10277    code_list->Add(Handle<Code>(code));
10278  }
10279  UNREACHABLE();
10280}
10281
10282
10283Name* Code::FindFirstName() {
10284  ASSERT(is_inline_cache_stub());
10285  DisallowHeapAllocation no_allocation;
10286  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10287  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10288    RelocInfo* info = it.rinfo();
10289    Object* object = info->target_object();
10290    if (object->IsName()) return Name::cast(object);
10291  }
10292  return NULL;
10293}
10294
10295
10296void Code::ReplaceNthCell(int n, Cell* replace_with) {
10297  ASSERT(is_inline_cache_stub());
10298  DisallowHeapAllocation no_allocation;
10299  int mask = RelocInfo::ModeMask(RelocInfo::CELL);
10300  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10301    RelocInfo* info = it.rinfo();
10302    if (--n == 0) {
10303      info->set_target_cell(replace_with);
10304      return;
10305    }
10306  }
10307  UNREACHABLE();
10308}
10309
10310
10311void Code::ClearInlineCaches() {
10312  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10313             RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10314             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
10315             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
10316  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10317    RelocInfo* info = it.rinfo();
10318    Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10319    if (target->is_inline_cache_stub()) {
10320      IC::Clear(info->pc());
10321    }
10322  }
10323}
10324
10325
// Resets this (full) code's type feedback cells to the uninitialized
// sentinel, leaving AllocationSite values in place.
void Code::ClearTypeFeedbackCells(Heap* heap) {
  if (kind() != FUNCTION) return;  // Only full code carries feedback cells.
  Object* raw_info = type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackCells* type_feedback_cells =
        TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
    for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
      Cell* cell = type_feedback_cells->GetCell(i);
      // Don't clear AllocationSites
      Object* value = cell->value();
      if (value == NULL || !value->IsAllocationSite()) {
        cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
      }
    }
  }
}
10342
10343
10344bool Code::allowed_in_shared_map_code_cache() {
10345  return is_keyed_load_stub() || is_keyed_store_stub() ||
10346      (is_compare_ic_stub() &&
10347       ICCompareStub::CompareState(stub_info()) == CompareIC::KNOWN_OBJECT);
10348}
10349
10350
// Patches the age sequence back to the youngest state (kNoAge) with
// NO_MARKING_PARITY.
void Code::MakeCodeAgeSequenceYoung(byte* sequence) {
  PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
}
10354
10355
10356void Code::MakeOlder(MarkingParity current_parity) {
10357  byte* sequence = FindCodeAgeSequence();
10358  if (sequence != NULL) {
10359    Age age;
10360    MarkingParity code_parity;
10361    GetCodeAgeAndParity(sequence, &age, &code_parity);
10362    if (age != kLastCodeAge && code_parity != current_parity) {
10363      PatchPlatformCodeAge(sequence, static_cast<Age>(age + 1),
10364                           current_parity);
10365    }
10366  }
10367}
10368
10369
10370bool Code::IsOld() {
10371  byte* sequence = FindCodeAgeSequence();
10372  if (sequence == NULL) return false;
10373  Age age;
10374  MarkingParity parity;
10375  GetCodeAgeAndParity(sequence, &age, &parity);
10376  return age >= kSexagenarianCodeAge;
10377}
10378
10379
10380byte* Code::FindCodeAgeSequence() {
10381  return FLAG_age_code &&
10382      prologue_offset() != kPrologueOffsetNotSet &&
10383      (kind() == OPTIMIZED_FUNCTION ||
10384       (kind() == FUNCTION && !has_debug_break_slots()))
10385      ? instruction_start() + prologue_offset()
10386      : NULL;
10387}
10388
10389
10390int Code::GetAge() {
10391  byte* sequence = FindCodeAgeSequence();
10392  if (sequence == NULL) {
10393    return Code::kNoAge;
10394  }
10395  Age age;
10396  MarkingParity parity;
10397  GetCodeAgeAndParity(sequence, &age, &parity);
10398  return age;
10399}
10400
10401
// Decodes age and marking parity from the code-age stub |code| by
// comparing it against each Make<AGE>CodeYoungAgain builtin (even and odd
// parity variants). |code| must be one of those stubs.
void Code::GetCodeAgeAndParity(Code* code, Age* age,
                               MarkingParity* parity) {
  Isolate* isolate = Isolate::Current();
  Builtins* builtins = isolate->builtins();
  Code* stub = NULL;
  // For each age, test the even-parity stub then the odd-parity stub.
#define HANDLE_CODE_AGE(AGE)                                            \
  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking();             \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = EVEN_MARKING_PARITY;                                      \
    return;                                                             \
  }                                                                     \
  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking();              \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = ODD_MARKING_PARITY;                                       \
    return;                                                             \
  }
  CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
  UNREACHABLE();  // |code| was not one of the code-age stubs.
}
10424
10425
// Returns the Make<AGE>CodeYoungAgain builtin stub matching |age| and
// |parity| — the inverse of GetCodeAgeAndParity above.
Code* Code::GetCodeAgeStub(Age age, MarkingParity parity) {
  Isolate* isolate = Isolate::Current();
  Builtins* builtins = isolate->builtins();
  switch (age) {
    // One case per age, selecting the even- or odd-parity stub.
#define HANDLE_CODE_AGE(AGE)                                            \
    case k##AGE##CodeAge: {                                             \
      Code* stub = parity == EVEN_MARKING_PARITY                        \
          ? *builtins->Make##AGE##CodeYoungAgainEvenMarking()           \
          : *builtins->Make##AGE##CodeYoungAgainOddMarking();           \
      return stub;                                                      \
    }
    CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
    default:
      UNREACHABLE();  // |age| must be one of the listed code ages.
      break;
  }
  return NULL;
}
10445
10446
// Prints the assembler comment that precedes the deopt runtime entry whose
// deoptimization id equals |bailout_id| (checking both the EAGER and SOFT
// tables). Prints nothing if no matching entry is found.
void Code::PrintDeoptLocation(int bailout_id) {
  const char* last_comment = NULL;
  int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
      | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->rmode() == RelocInfo::COMMENT) {
      // Remember the most recent comment; it describes the code that
      // follows, including any deopt runtime entry.
      last_comment = reinterpret_cast<const char*>(info->data());
    } else if (last_comment != NULL) {
      if ((bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
          (bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
        CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
        PrintF("            %s\n", last_comment);
        return;
      }
    }
  }
}
10467
10468
10469bool Code::CanDeoptAt(Address pc) {
10470  DeoptimizationInputData* deopt_data =
10471      DeoptimizationInputData::cast(deoptimization_data());
10472  Address code_start_address = instruction_start();
10473  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
10474    if (deopt_data->Pc(i)->value() == -1) continue;
10475    Address address = code_start_address + deopt_data->Pc(i)->value();
10476    if (address == pc) return true;
10477  }
10478  return false;
10479}
10480
10481
// Identify kind of code: returns the enum name for |kind|.
const char* Code::Kind2String(Kind kind) {
  switch (kind) {
    // Expands to 'case KIND: return "KIND";' for every code kind.
#define CASE(name) case name: return #name;
    CODE_KIND_LIST(CASE)
#undef CASE
    case NUMBER_OF_KINDS: break;  // Sentinel, not a real kind.
  }
  UNREACHABLE();
  return NULL;
}
10493
10494
10495#ifdef ENABLE_DISASSEMBLER
10496
// Prints a human-readable dump of this deoptimization input data to |out|:
// one row per deopt point and, with --print-code-verbose, the decoded
// frame translation commands for each point.
void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
  disasm::NameConverter converter;
  int deopt_count = DeoptCount();
  PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
  if (0 == deopt_count) return;

  PrintF(out, "%6s  %6s  %6s %6s %12s\n", "index", "ast id", "argc", "pc",
         FLAG_print_code_verbose ? "commands" : "");
  for (int i = 0; i < deopt_count; i++) {
    PrintF(out, "%6d  %6d  %6d %6d",
           i,
           AstId(i).ToInt(),
           ArgumentsStackHeight(i)->value(),
           Pc(i)->value());

    if (!FLAG_print_code_verbose) {
      PrintF(out, "\n");
      continue;
    }
    // Print details of the frame translation.
    int translation_index = TranslationIndex(i)->value();
    TranslationIterator iterator(TranslationByteArray(), translation_index);
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator.Next());
    ASSERT(Translation::BEGIN == opcode);
    // BEGIN is followed by the total frame count and the JS frame count.
    int frame_count = iterator.Next();
    int jsframe_count = iterator.Next();
    PrintF(out, "  %s {frame count=%d, js frame count=%d}\n",
           Translation::StringFor(opcode),
           frame_count,
           jsframe_count);

    // Decode commands until the next BEGIN (i.e. the next deopt point's
    // translation) or the end of the translation stream.
    while (iterator.HasNext() &&
           Translation::BEGIN !=
           (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
      PrintF(out, "%24s    %s ", "", Translation::StringFor(opcode));

      switch (opcode) {
        case Translation::BEGIN:
          UNREACHABLE();  // Filtered out by the loop condition above.
          break;

        case Translation::JS_FRAME: {
          int ast_id = iterator.Next();
          int function_id = iterator.Next();
          unsigned height = iterator.Next();
          PrintF(out, "{ast_id=%d, function=", ast_id);
          if (function_id != Translation::kSelfLiteralId) {
            Object* function = LiteralArray()->get(function_id);
            JSFunction::cast(function)->PrintName(out);
          } else {
            // kSelfLiteralId marks the function owning this data.
            PrintF(out, "<self>");
          }
          PrintF(out, ", height=%u}", height);
          break;
        }

        case Translation::COMPILED_STUB_FRAME: {
          Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
          PrintF(out, "{kind=%d}", stub_kind);
          break;
        }

        case Translation::ARGUMENTS_ADAPTOR_FRAME:
        case Translation::CONSTRUCT_STUB_FRAME: {
          int function_id = iterator.Next();
          JSFunction* function =
              JSFunction::cast(LiteralArray()->get(function_id));
          unsigned height = iterator.Next();
          PrintF(out, "{function=");
          function->PrintName(out);
          PrintF(out, ", height=%u}", height);
          break;
        }

        case Translation::GETTER_STUB_FRAME:
        case Translation::SETTER_STUB_FRAME: {
          int function_id = iterator.Next();
          JSFunction* function =
              JSFunction::cast(LiteralArray()->get(function_id));
          PrintF(out, "{function=");
          function->PrintName(out);
          PrintF(out, "}");
          break;
        }

        case Translation::REGISTER: {
          int reg_code = iterator.Next();
            PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
          break;
        }

        case Translation::INT32_REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
          break;
        }

        case Translation::UINT32_REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s (unsigned)}",
                 converter.NameOfCPURegister(reg_code));
          break;
        }

        case Translation::DOUBLE_REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s}",
                 DoubleRegister::AllocationIndexToString(reg_code));
          break;
        }

        case Translation::STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d}", input_slot_index);
          break;
        }

        case Translation::INT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d}", input_slot_index);
          break;
        }

        case Translation::UINT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d (unsigned)}", input_slot_index);
          break;
        }

        case Translation::DOUBLE_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d}", input_slot_index);
          break;
        }

        case Translation::LITERAL: {
          unsigned literal_index = iterator.Next();
          PrintF(out, "{literal_id=%u}", literal_index);
          break;
        }

        case Translation::DUPLICATED_OBJECT: {
          int object_index = iterator.Next();
          PrintF(out, "{object_index=%d}", object_index);
          break;
        }

        case Translation::ARGUMENTS_OBJECT:
        case Translation::CAPTURED_OBJECT: {
          int args_length = iterator.Next();
          PrintF(out, "{length=%d}", args_length);
          break;
        }
      }
      PrintF(out, "\n");
    }
  }
}
10656
10657
10658void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
10659  PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
10660         this->DeoptPoints());
10661  if (this->DeoptPoints() == 0) return;
10662
10663  PrintF("%6s  %8s  %s\n", "ast id", "pc", "state");
10664  for (int i = 0; i < this->DeoptPoints(); i++) {
10665    int pc_and_state = this->PcAndState(i)->value();
10666    PrintF("%6d  %8d  %s\n",
10667           this->AstId(i).ToInt(),
10668           FullCodeGenerator::PcField::decode(pc_and_state),
10669           FullCodeGenerator::State2String(
10670               FullCodeGenerator::StateField::decode(pc_and_state)));
10671  }
10672}
10673
10674
10675const char* Code::ICState2String(InlineCacheState state) {
10676  switch (state) {
10677    case UNINITIALIZED: return "UNINITIALIZED";
10678    case PREMONOMORPHIC: return "PREMONOMORPHIC";
10679    case MONOMORPHIC: return "MONOMORPHIC";
10680    case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE";
10681    case POLYMORPHIC: return "POLYMORPHIC";
10682    case MEGAMORPHIC: return "MEGAMORPHIC";
10683    case GENERIC: return "GENERIC";
10684    case DEBUG_STUB: return "DEBUG_STUB";
10685  }
10686  UNREACHABLE();
10687  return NULL;
10688}
10689
10690
10691const char* Code::StubType2String(StubType type) {
10692  switch (type) {
10693    case NORMAL: return "NORMAL";
10694    case FIELD: return "FIELD";
10695    case CONSTANT: return "CONSTANT";
10696    case CALLBACKS: return "CALLBACKS";
10697    case INTERCEPTOR: return "INTERCEPTOR";
10698    case MAP_TRANSITION: return "MAP_TRANSITION";
10699    case NONEXISTENT: return "NONEXISTENT";
10700  }
10701  UNREACHABLE();  // keep the compiler happy
10702  return NULL;
10703}
10704
10705
10706void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
10707  PrintF(out, "extra_ic_state = ");
10708  const char* name = NULL;
10709  switch (kind) {
10710    case CALL_IC:
10711      if (extra == STRING_INDEX_OUT_OF_BOUNDS) {
10712        name = "STRING_INDEX_OUT_OF_BOUNDS";
10713      }
10714      break;
10715    case STORE_IC:
10716    case KEYED_STORE_IC:
10717      if (extra == kStrictMode) {
10718        name = "STRICT";
10719      }
10720      break;
10721    default:
10722      break;
10723  }
10724  if (name != NULL) {
10725    PrintF(out, "%s\n", name);
10726  } else {
10727    PrintF(out, "%d\n", extra);
10728  }
10729}
10730
10731
10732void Code::Disassemble(const char* name, FILE* out) {
10733  PrintF(out, "kind = %s\n", Kind2String(kind()));
10734  if (is_inline_cache_stub()) {
10735    PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
10736    PrintExtraICState(out, kind(), needs_extended_extra_ic_state(kind()) ?
10737        extended_extra_ic_state() : extra_ic_state());
10738    if (ic_state() == MONOMORPHIC) {
10739      PrintF(out, "type = %s\n", StubType2String(type()));
10740    }
10741    if (is_call_stub() || is_keyed_call_stub()) {
10742      PrintF(out, "argc = %d\n", arguments_count());
10743    }
10744    if (is_compare_ic_stub()) {
10745      ASSERT(major_key() == CodeStub::CompareIC);
10746      CompareIC::State left_state, right_state, handler_state;
10747      Token::Value op;
10748      ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state,
10749                                    &handler_state, &op);
10750      PrintF(out, "compare_state = %s*%s -> %s\n",
10751             CompareIC::GetStateName(left_state),
10752             CompareIC::GetStateName(right_state),
10753             CompareIC::GetStateName(handler_state));
10754      PrintF(out, "compare_operation = %s\n", Token::Name(op));
10755    }
10756  }
10757  if ((name != NULL) && (name[0] != '\0')) {
10758    PrintF(out, "name = %s\n", name);
10759  }
10760  if (kind() == OPTIMIZED_FUNCTION) {
10761    PrintF(out, "stack_slots = %d\n", stack_slots());
10762  }
10763
10764  PrintF(out, "Instructions (size = %d)\n", instruction_size());
10765  Disassembler::Decode(out, this);
10766  PrintF(out, "\n");
10767
10768  if (kind() == FUNCTION) {
10769    DeoptimizationOutputData* data =
10770        DeoptimizationOutputData::cast(this->deoptimization_data());
10771    data->DeoptimizationOutputDataPrint(out);
10772  } else if (kind() == OPTIMIZED_FUNCTION) {
10773    DeoptimizationInputData* data =
10774        DeoptimizationInputData::cast(this->deoptimization_data());
10775    data->DeoptimizationInputDataPrint(out);
10776  }
10777  PrintF("\n");
10778
10779  if (is_crankshafted()) {
10780    SafepointTable table(this);
10781    PrintF(out, "Safepoints (size = %u)\n", table.size());
10782    for (unsigned i = 0; i < table.length(); i++) {
10783      unsigned pc_offset = table.GetPcOffset(i);
10784      PrintF(out, "%p  %4d  ", (instruction_start() + pc_offset), pc_offset);
10785      table.PrintEntry(i);
10786      PrintF(out, " (sp -> fp)");
10787      SafepointEntry entry = table.GetEntry(i);
10788      if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
10789        PrintF(out, "  %6d", entry.deoptimization_index());
10790      } else {
10791        PrintF(out, "  <none>");
10792      }
10793      if (entry.argument_count() > 0) {
10794        PrintF(out, " argc: %d", entry.argument_count());
10795      }
10796      PrintF(out, "\n");
10797    }
10798    PrintF(out, "\n");
10799  } else if (kind() == FUNCTION) {
10800    unsigned offset = back_edge_table_offset();
10801    // If there is no back edge table, the "table start" will be at or after
10802    // (due to alignment) the end of the instruction stream.
10803    if (static_cast<int>(offset) < instruction_size()) {
10804      FullCodeGenerator::BackEdgeTableIterator back_edges(this);
10805
10806      PrintF(out, "Back edges (size = %u)\n", back_edges.table_length());
10807      PrintF(out, "ast_id  pc_offset  loop_depth\n");
10808
10809      for ( ; !back_edges.Done(); back_edges.Next()) {
10810        PrintF(out, "%6d  %9u  %10u\n", back_edges.ast_id().ToInt(),
10811                                        back_edges.pc_offset(),
10812                                        back_edges.loop_depth());
10813      }
10814
10815      PrintF(out, "\n");
10816    }
10817#ifdef OBJECT_PRINT
10818    if (!type_feedback_info()->IsUndefined()) {
10819      TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
10820      PrintF(out, "\n");
10821    }
10822#endif
10823  }
10824
10825  PrintF("RelocInfo (size = %d)\n", relocation_size());
10826  for (RelocIterator it(this); !it.done(); it.next()) {
10827    it.rinfo()->Print(GetIsolate(), out);
10828  }
10829  PrintF(out, "\n");
10830}
10831#endif  // ENABLE_DISASSEMBLER
10832
10833
MaybeObject* JSObject::SetFastElementsCapacityAndLength(
    int capacity,
    int length,
    SetFastElementsCapacitySmiMode smi_mode) {
  // Replaces the fast elements backing store with a new FixedArray of
  // |capacity| slots, copying the existing elements over.  For JSArrays the
  // length is set to |length|.  The elements kind may transition between
  // the SMI/object and packed/holey fast variants depending on |smi_mode|.
  // Returns the new backing store, or a failure if allocation fails.
  Heap* heap = GetHeap();
  // We should never end in here with a pixel or external array.
  ASSERT(!HasExternalArrayElements());
  ASSERT(!map()->is_observed());

  // Allocate a new fast elements backing store.
  FixedArray* new_elements;
  MaybeObject* maybe = heap->AllocateUninitializedFixedArray(capacity);
  if (!maybe->To(&new_elements)) return maybe;

  ElementsKind elements_kind = GetElementsKind();
  ElementsKind new_elements_kind;
  // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
  // or if it's allowed and the old elements array contained only SMIs.
  bool has_fast_smi_elements =
      (smi_mode == kForceSmiElements) ||
      ((smi_mode == kAllowSmiElements) && HasFastSmiElements());
  if (has_fast_smi_elements) {
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
    } else {
      new_elements_kind = FAST_SMI_ELEMENTS;
    }
  } else {
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_ELEMENTS;
    } else {
      new_elements_kind = FAST_ELEMENTS;
    }
  }
  FixedArrayBase* old_elements = elements();
  ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
  MaybeObject* maybe_obj =
      accessor->CopyElements(this, new_elements, elements_kind);
  if (maybe_obj->IsFailure()) return maybe_obj;

  if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
    Map* new_map = map();
    if (new_elements_kind != elements_kind) {
      // NOTE: this |maybe| intentionally shadows the outer one; both are
      // short-lived temporaries for allocation results.
      MaybeObject* maybe =
          GetElementsTransitionMap(GetIsolate(), new_elements_kind);
      if (!maybe->To(&new_map)) return maybe;
    }
    ValidateElements();
    set_map_and_elements(new_map, new_elements);
  } else {
    // For arguments objects the actual backing store lives in slot 1 of the
    // parameter map; the object's map and elements pointer stay unchanged.
    FixedArray* parameter_map = FixedArray::cast(old_elements);
    parameter_map->set(1, new_elements);
  }

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, elements_kind, old_elements,
                            GetElementsKind(), new_elements);
  }

  if (IsJSArray()) {
    JSArray::cast(this)->set_length(Smi::FromInt(length));
  }
  return new_elements;
}
10898
10899
MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
    int capacity,
    int length) {
  // Like SetFastElementsCapacityAndLength, but transitions to an unboxed
  // double backing store (FixedDoubleArray).  Returns |this| on success or
  // a failure if any allocation fails.
  Heap* heap = GetHeap();
  // We should never end in here with a pixel or external array.
  ASSERT(!HasExternalArrayElements());
  ASSERT(!map()->is_observed());

  FixedArrayBase* elems;
  { MaybeObject* maybe_obj =
        heap->AllocateUninitializedFixedDoubleArray(capacity);
    if (!maybe_obj->To(&elems)) return maybe_obj;
  }

  // Preserve the holeyness of the current elements kind in the new kind.
  ElementsKind elements_kind = GetElementsKind();
  ElementsKind new_elements_kind = elements_kind;
  if (IsHoleyElementsKind(elements_kind)) {
    new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
  } else {
    new_elements_kind = FAST_DOUBLE_ELEMENTS;
  }

  Map* new_map;
  { MaybeObject* maybe_obj =
        GetElementsTransitionMap(heap->isolate(), new_elements_kind);
    if (!maybe_obj->To(&new_map)) return maybe_obj;
  }

  FixedArrayBase* old_elements = elements();
  ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
  { MaybeObject* maybe_obj =
        accessor->CopyElements(this, elems, elements_kind);
    if (maybe_obj->IsFailure()) return maybe_obj;
  }
  if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
    ValidateElements();
    set_map_and_elements(new_map, elems);
  } else {
    // Arguments objects keep their backing store in slot 1 of the
    // parameter map; map and elements pointer stay unchanged.
    FixedArray* parameter_map = FixedArray::cast(old_elements);
    parameter_map->set(1, elems);
  }

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, elements_kind, old_elements,
                            GetElementsKind(), elems);
  }

  if (IsJSArray()) {
    JSArray::cast(this)->set_length(Smi::FromInt(length));
  }

  return this;
}
10953
10954
10955MaybeObject* JSArray::Initialize(int capacity, int length) {
10956  ASSERT(capacity >= 0);
10957  return GetHeap()->AllocateJSArrayStorage(this, length, capacity,
10958                                           INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
10959}
10960
10961
10962void JSArray::Expand(int required_size) {
10963  GetIsolate()->factory()->SetElementsCapacityAndLength(
10964      Handle<JSArray>(this), required_size, required_size);
10965}
10966
10967
10968// Returns false if the passed-in index is marked non-configurable,
10969// which will cause the ES5 truncation operation to halt, and thus
10970// no further old values need be collected.
10971static bool GetOldValue(Isolate* isolate,
10972                        Handle<JSObject> object,
10973                        uint32_t index,
10974                        List<Handle<Object> >* old_values,
10975                        List<uint32_t>* indices) {
10976  PropertyAttributes attributes = object->GetLocalElementAttribute(index);
10977  ASSERT(attributes != ABSENT);
10978  if (attributes == DONT_DELETE) return false;
10979  old_values->Add(object->GetLocalElementAccessorPair(index) == NULL
10980      ? Object::GetElement(object, index)
10981      : Handle<Object>::cast(isolate->factory()->the_hole_value()));
10982  indices->Add(index);
10983  return true;
10984}
10985
10986static void EnqueueSpliceRecord(Handle<JSArray> object,
10987                                uint32_t index,
10988                                Handle<JSArray> deleted,
10989                                uint32_t add_count) {
10990  Isolate* isolate = object->GetIsolate();
10991  HandleScope scope(isolate);
10992  Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
10993  Handle<Object> add_count_object =
10994      isolate->factory()->NewNumberFromUint(add_count);
10995
10996  Handle<Object> args[] =
10997      { object, index_object, deleted, add_count_object };
10998
10999  bool threw;
11000  Execution::Call(Handle<JSFunction>(isolate->observers_enqueue_splice()),
11001                  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
11002                  &threw);
11003  ASSERT(!threw);
11004}
11005
11006
11007static void BeginPerformSplice(Handle<JSArray> object) {
11008  Isolate* isolate = object->GetIsolate();
11009  HandleScope scope(isolate);
11010  Handle<Object> args[] = { object };
11011
11012  bool threw;
11013  Execution::Call(Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11014                  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
11015                  &threw);
11016  ASSERT(!threw);
11017}
11018
11019
11020static void EndPerformSplice(Handle<JSArray> object) {
11021  Isolate* isolate = object->GetIsolate();
11022  HandleScope scope(isolate);
11023  Handle<Object> args[] = { object };
11024
11025  bool threw;
11026  Execution::Call(Handle<JSFunction>(isolate->observers_end_perform_splice()),
11027                  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
11028                  &threw);
11029  ASSERT(!threw);
11030}
11031
11032
MaybeObject* JSArray::SetElementsLength(Object* len) {
  // Sets this array's length to |len|.  For observed arrays this also
  // collects the values of the removed elements and enqueues
  // "deleted"/"updated" change records plus a splice record.
  // We should never end in here with a pixel or external array.
  ASSERT(AllowsSetElementsLength());
  if (!(FLAG_harmony_observation && map()->is_observed()))
    return GetElementsAccessor()->SetLength(this, len);

  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<JSArray> self(this);
  List<uint32_t> indices;
  List<Handle<Object> > old_values;
  Handle<Object> old_length_handle(self->length(), isolate);
  Handle<Object> new_length_handle(len, isolate);
  uint32_t old_length = 0;
  CHECK(old_length_handle->ToArrayIndex(&old_length));
  uint32_t new_length = 0;
  if (!new_length_handle->ToArrayIndex(&new_length))
    return Failure::InternalError();

  // Observed arrays should always be in dictionary mode;
  // if they were in fast mode, the below is slower than necessary
  // as it iterates over the array backing store multiple times.
  ASSERT(self->HasDictionaryElements());
  static const PropertyAttributes kNoAttrFilter = NONE;
  int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
  // Collect old values of the elements being removed, walking from the top
  // down so the walk can stop at the first non-configurable element.
  if (num_elements > 0) {
    if (old_length == static_cast<uint32_t>(num_elements)) {
      // Simple case for arrays without holes.
      for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
        if (!GetOldValue(isolate, self, i, &old_values, &indices)) break;
      }
    } else {
      // For sparse arrays, only iterate over existing elements.
      Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
      self->GetLocalElementKeys(*keys, kNoAttrFilter);
      while (num_elements-- > 0) {
        uint32_t index = NumberToUint32(keys->get(num_elements));
        if (index < new_length) break;
        if (!GetOldValue(isolate, self, index, &old_values, &indices)) break;
      }
    }
  }

  // Perform the actual length change through the elements accessor.
  MaybeObject* result =
      self->GetElementsAccessor()->SetLength(*self, *new_length_handle);
  Handle<Object> hresult;
  if (!result->ToHandle(&hresult, isolate)) return result;

  // Re-read the length: the accessor may have clamped it (e.g. at a
  // non-configurable element).
  CHECK(self->length()->ToArrayIndex(&new_length));
  if (old_length == new_length) return *hresult;

  BeginPerformSplice(self);

  // One "deleted" record per removed element, then the length update.
  for (int i = 0; i < indices.length(); ++i) {
    JSObject::EnqueueChangeRecord(
        self, "deleted", isolate->factory()->Uint32ToString(indices[i]),
        old_values[i]);
  }
  JSObject::EnqueueChangeRecord(
      self, "updated", isolate->factory()->length_string(),
      old_length_handle);

  EndPerformSplice(self);

  // Build the |deleted| array for the splice record from the collected
  // old values, indexed relative to the splice start.
  uint32_t index = Min(old_length, new_length);
  uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
  uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
  Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
  if (delete_count > 0) {
    for (int i = indices.length() - 1; i >= 0; i--) {
      JSObject::SetElement(deleted, indices[i] - index, old_values[i], NONE,
                           kNonStrictMode);
    }

    SetProperty(deleted, isolate->factory()->length_string(),
                isolate->factory()->NewNumberFromUint(delete_count),
                NONE, kNonStrictMode);
  }

  EnqueueSpliceRecord(self, index, deleted, add_count);

  return *hresult;
}
11116
11117
11118Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11119                                        Handle<Object> prototype) {
11120  FixedArray* cache = map->GetPrototypeTransitions();
11121  int number_of_transitions = map->NumberOfProtoTransitions();
11122  const int proto_offset =
11123      kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11124  const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11125  const int step = kProtoTransitionElementsPerEntry;
11126  for (int i = 0; i < number_of_transitions; i++) {
11127    if (cache->get(proto_offset + i * step) == *prototype) {
11128      Object* result = cache->get(map_offset + i * step);
11129      return Handle<Map>(Map::cast(result));
11130    }
11131  }
11132  return Handle<Map>();
11133}
11134
11135
Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
                                        Handle<Object> prototype,
                                        Handle<Map> target_map) {
  // Caches the |prototype| -> |target_map| transition on |map| so future
  // SetPrototype calls can reuse the map.  Returns |map| unchanged when
  // caching is disabled, the map is shared, or the cache is full.
  ASSERT(target_map->IsMap());
  ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
  // Don't cache prototype transition if this map is shared.
  if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;

  const int step = kProtoTransitionElementsPerEntry;
  const int header = kProtoTransitionHeaderSize;

  Handle<FixedArray> cache(map->GetPrototypeTransitions());
  int capacity = (cache->length() - header) / step;
  int transitions = map->NumberOfProtoTransitions() + 1;

  if (transitions > capacity) {
    if (capacity > kMaxCachedPrototypeTransitions) return map;

    // Grow array by factor 2 over and above what we need.
    Factory* factory = map->GetIsolate()->factory();
    cache = factory->CopySizeFixedArray(cache, transitions * 2 * step + header);

    // Install the grown cache; retries on allocation failure, and bails
    // out with a null handle if it cannot succeed even after GC.
    CALL_AND_RETRY_OR_DIE(map->GetIsolate(),
                          map->SetPrototypeTransitions(*cache),
                          break,
                          return Handle<Map>());
  }

  // Reload number of transitions as GC might shrink them.
  int last = map->NumberOfProtoTransitions();
  int entry = header + last * step;

  cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
  cache->set(entry + kProtoTransitionMapOffset, *target_map);
  map->SetNumberOfProtoTransitions(transitions);

  return map;
}
11174
11175
11176void Map::ZapTransitions() {
11177  TransitionArray* transition_array = transitions();
11178  // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11179  // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11180  Object** data = transition_array->data_start();
11181  Object* the_hole = GetHeap()->the_hole_value();
11182  int length = transition_array->length();
11183  for (int i = 0; i < length; i++) {
11184    data[i] = the_hole;
11185  }
11186}
11187
11188
11189void Map::ZapPrototypeTransitions() {
11190  FixedArray* proto_transitions = GetPrototypeTransitions();
11191  MemsetPointer(proto_transitions->data_start(),
11192                GetHeap()->the_hole_value(),
11193                proto_transitions->length());
11194}
11195
11196
11197void Map::AddDependentCompilationInfo(DependentCode::DependencyGroup group,
11198                                      CompilationInfo* info) {
11199  Handle<DependentCode> dep(dependent_code());
11200  Handle<DependentCode> codes =
11201      DependentCode::Insert(dep, group, info->object_wrapper());
11202  if (*codes != dependent_code()) set_dependent_code(*codes);
11203  info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
11204}
11205
11206
11207void Map::AddDependentCode(DependentCode::DependencyGroup group,
11208                           Handle<Code> code) {
11209  Handle<DependentCode> codes = DependentCode::Insert(
11210      Handle<DependentCode>(dependent_code()), group, code);
11211  if (*codes != dependent_code()) set_dependent_code(*codes);
11212}
11213
11214
DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
  // Eagerly compute the per-group start offsets for |entries|.
  Recompute(entries);
}
11218
11219
11220void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11221  start_indexes_[0] = 0;
11222  for (int g = 1; g <= kGroupCount; g++) {
11223    int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11224    start_indexes_[g] = start_indexes_[g - 1] + count;
11225  }
11226}
11227
11228
11229DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11230                                        DependencyGroup group) {
11231  AllowDeferredHandleDereference dependencies_are_safe;
11232  if (group == DependentCode::kPropertyCellChangedGroup) {
11233    return Handle<PropertyCell>::cast(object)->dependent_code();
11234  }
11235  return Handle<Map>::cast(object)->dependent_code();
11236}
11237
11238
Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
                                            DependencyGroup group,
                                            Handle<Object> object) {
  // Appends |object| (a Code object or a CompilationInfo wrapper) to the
  // given dependency |group|, growing the underlying fixed array when
  // needed.  Returns the (possibly reallocated) array.
  GroupStartIndexes starts(*entries);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int number_of_entries = starts.number_of_entries();
  if (start < end && entries->object_at(end - 1) == *object) {
    // Do not append the compilation info if it is already in the array.
    // It is sufficient to just check only the last element because
    // we process embedded maps of an optimized code in one batch.
    return entries;
  }
  if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
    // Out of space: copy into a larger array, with 25% slack once the
    // required capacity exceeds 5 slots.
    Factory* factory = entries->GetIsolate()->factory();
    int capacity = kCodesStartIndex + number_of_entries + 1;
    if (capacity > 5) capacity = capacity * 5 / 4;
    Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
        factory->CopySizeFixedArray(entries, capacity));
    // The number of codes can change after GC.
    starts.Recompute(*entries);
    start = starts.at(group);
    end = starts.at(group + 1);
    number_of_entries = starts.number_of_entries();
    // Clear the old array so its stale slots don't keep objects alive.
    for (int i = 0; i < number_of_entries; i++) {
      entries->clear_at(i);
    }
    // If the old fixed array was empty, we need to reset counters of the
    // new array.
    if (number_of_entries == 0) {
      for (int g = 0; g < kGroupCount; g++) {
        new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
      }
    }
    entries = new_entries;
  }
  // |end| is the first slot past |group|; ExtendGroup shifts later groups
  // up by one so the new entry can be stored there.
  entries->ExtendGroup(group);
  entries->set_object_at(end, *object);
  entries->set_number_of_entries(group, end + 1 - start);
  return entries;
}
11280
11281
11282void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11283                                         CompilationInfo* info,
11284                                         Code* code) {
11285  DisallowHeapAllocation no_gc;
11286  AllowDeferredHandleDereference get_object_wrapper;
11287  Foreign* info_wrapper = *info->object_wrapper();
11288  GroupStartIndexes starts(this);
11289  int start = starts.at(group);
11290  int end = starts.at(group + 1);
11291  for (int i = start; i < end; i++) {
11292    if (object_at(i) == info_wrapper) {
11293      set_object_at(i, code);
11294      break;
11295    }
11296  }
11297
11298#ifdef DEBUG
11299  for (int i = start; i < end; i++) {
11300    ASSERT(is_code_at(i) || compilation_info_at(i) != info);
11301  }
11302#endif
11303}
11304
11305
void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
                                          CompilationInfo* info) {
  // Removes the placeholder entry for |info| from |group|, compacting the
  // array by cascading the last element of each subsequent group into the
  // vacated slot.
  DisallowHeapAllocation no_allocation;
  AllowDeferredHandleDereference get_object_wrapper;
  Foreign* info_wrapper = *info->object_wrapper();
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  // Find compilation info wrapper.
  int info_pos = -1;
  for (int i = start; i < end; i++) {
    if (object_at(i) == info_wrapper) {
      info_pos = i;
      break;
    }
  }
  if (info_pos == -1) return;  // Not found.
  int gap = info_pos;
  // Use the last of each group to fill the gap in the previous group.
  for (int i = group; i < kGroupCount; i++) {
    int last_of_group = starts.at(i + 1) - 1;
    ASSERT(last_of_group >= gap);
    if (last_of_group == gap) continue;
    copy(last_of_group, gap);
    gap = last_of_group;
  }
  // After cascading, the gap has moved to the very last occupied slot.
  ASSERT(gap == starts.number_of_entries() - 1);
  clear_at(gap);  // Clear last gap.
  set_number_of_entries(group, end - start - 1);

#ifdef DEBUG
  // No entry for |info| may remain in the group after removal.
  for (int i = start; i < end - 1; i++) {
    ASSERT(is_code_at(i) || compilation_info_at(i) != info);
  }
#endif
}
11342
11343
11344bool DependentCode::Contains(DependencyGroup group, Code* code) {
11345  GroupStartIndexes starts(this);
11346  int number_of_entries = starts.number_of_entries();
11347  for (int i = 0; i < number_of_entries; i++) {
11348    if (object_at(i) == code) return true;
11349  }
11350  return false;
11351}
11352
11353
void DependentCode::DeoptimizeDependentCodeGroup(
    Isolate* isolate,
    DependentCode::DependencyGroup group) {
  // Collects every code object registered in |group| (aborting compilations
  // that are still in flight), removes the group's entries from the array,
  // and deoptimizes the collected code.
  ASSERT(AllowCodeDependencyChange::IsAllowed());
  DisallowHeapAllocation no_allocation_scope;
  DependentCode::GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int code_entries = starts.number_of_entries();
  if (start == end) return;

  // Collect all the code to deoptimize.
  Zone zone(isolate);
  ZoneList<Code*> codes(end - start, &zone);
  for (int i = start; i < end; i++) {
    if (is_code_at(i)) {
      Code* code = code_at(i);
      if (!code->marked_for_deoptimization()) codes.Add(code, &zone);
    } else {
      // Entry is a not-yet-finished compilation; abort it instead.
      CompilationInfo* info = compilation_info_at(i);
      info->AbortDueToDependencyChange();
    }
  }
  // Compact the array by moving all subsequent groups to fill in the new holes.
  for (int src = end, dst = start; src < code_entries; src++, dst++) {
    copy(src, dst);
  }
  // Now the holes are at the end of the array, zap them for heap-verifier.
  int removed = end - start;
  for (int i = code_entries - removed; i < code_entries; i++) {
    clear_at(i);
  }
  set_number_of_entries(group, 0);
  Deoptimizer::DeoptimizeCodeList(isolate, &codes);
}
11389
11390
Handle<Object> JSObject::SetPrototype(Handle<JSObject> object,
                                      Handle<Object> value,
                                      bool skip_hidden_prototypes) {
  // Sets |object|'s prototype to |value|, enforcing extensibility and
  // cycle restrictions.  Returns |value| on success, a null handle with a
  // pending exception on failure.  Non-receiver, non-null values are
  // silently ignored (SpiderMonkey compatibility).
#ifdef DEBUG
  int size = object->Size();
#endif

  Isolate* isolate = object->GetIsolate();
  Heap* heap = isolate->heap();
  // Silently ignore the change if value is not a JSObject or null.
  // SpiderMonkey behaves this way.
  if (!value->IsJSReceiver() && !value->IsNull()) return value;

  // From 8.6.2 Object Internal Methods
  // ...
  // In addition, if [[Extensible]] is false the value of the [[Class]] and
  // [[Prototype]] internal properties of the object may not be modified.
  // ...
  // Implementation specific extensions that modify [[Class]], [[Prototype]]
  // or [[Extensible]] must not violate the invariants defined in the preceding
  // paragraph.
  if (!object->map()->is_extensible()) {
    Handle<Object> args[] = { object };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return Handle<Object>();
  }

  // Before we can set the prototype we need to be sure
  // prototype cycles are prevented.
  // It is sufficient to validate that the receiver is not in the new prototype
  // chain.
  for (Object* pt = *value;
       pt != heap->null_value();
       pt = pt->GetPrototype(isolate)) {
    if (JSReceiver::cast(pt) == *object) {
      // Cycle detected.
      Handle<Object> error = isolate->factory()->NewError(
          "cyclic_proto", HandleVector<Object>(NULL, 0));
      isolate->Throw(*error);
      return Handle<Object>();
    }
  }

  Handle<JSObject> real_receiver = object;

  if (skip_hidden_prototypes) {
    // Find the first object in the chain whose prototype object is not
    // hidden and set the new prototype on that object.
    Object* current_proto = real_receiver->GetPrototype();
    while (current_proto->IsJSObject() &&
          JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
      real_receiver = handle(JSObject::cast(current_proto), isolate);
      current_proto = current_proto->GetPrototype(isolate);
    }
  }

  // Set the new prototype of the object.
  Handle<Map> map(real_receiver->map());

  // Nothing to do if prototype is already set.
  if (map->prototype() == *value) return value;

  if (value->IsJSObject()) {
    JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
  }

  // Reuse a cached transition map for this prototype when available,
  // otherwise copy the map and cache the new transition.
  Handle<Map> new_map = Map::GetPrototypeTransition(map, value);
  if (new_map.is_null()) {
    new_map = Map::Copy(map);
    Map::PutPrototypeTransition(map, value, new_map);
    new_map->set_prototype(*value);
  }
  ASSERT(new_map->prototype() == *value);
  real_receiver->set_map(*new_map);

  // Prototype changes invalidate the instanceof result cache.
  heap->ClearInstanceofCache();
  ASSERT(size == object->Size());
  return value;
}
11472
11473
11474MaybeObject* JSObject::EnsureCanContainElements(Arguments* args,
11475                                                uint32_t first_arg,
11476                                                uint32_t arg_count,
11477                                                EnsureElementsMode mode) {
11478  // Elements in |Arguments| are ordered backwards (because they're on the
11479  // stack), but the method that's called here iterates over them in forward
11480  // direction.
11481  return EnsureCanContainElements(
11482      args->arguments() - first_arg - (arg_count - 1),
11483      arg_count, mode);
11484}
11485
11486
11487PropertyType JSObject::GetLocalPropertyType(Name* name) {
11488  uint32_t index = 0;
11489  if (name->AsArrayIndex(&index)) {
11490    return GetLocalElementType(index);
11491  }
11492  LookupResult lookup(GetIsolate());
11493  LocalLookup(name, &lookup, true);
11494  return lookup.type();
11495}
11496
11497
11498PropertyType JSObject::GetLocalElementType(uint32_t index) {
11499  return GetElementsAccessor()->GetType(this, this, index);
11500}
11501
11502
11503AccessorPair* JSObject::GetLocalPropertyAccessorPair(Name* name) {
11504  uint32_t index = 0;
11505  if (name->AsArrayIndex(&index)) {
11506    return GetLocalElementAccessorPair(index);
11507  }
11508
11509  LookupResult lookup(GetIsolate());
11510  LocalLookupRealNamedProperty(name, &lookup);
11511
11512  if (lookup.IsPropertyCallbacks() &&
11513      lookup.GetCallbackObject()->IsAccessorPair()) {
11514    return AccessorPair::cast(lookup.GetCallbackObject());
11515  }
11516  return NULL;
11517}
11518
11519
11520AccessorPair* JSObject::GetLocalElementAccessorPair(uint32_t index) {
11521  if (IsJSGlobalProxy()) {
11522    Object* proto = GetPrototype();
11523    if (proto->IsNull()) return NULL;
11524    ASSERT(proto->IsJSGlobalObject());
11525    return JSObject::cast(proto)->GetLocalElementAccessorPair(index);
11526  }
11527
11528  // Check for lookup interceptor.
11529  if (HasIndexedInterceptor()) return NULL;
11530
11531  return GetElementsAccessor()->GetAccessorPair(this, this, index);
11532}
11533
11534
// Stores |value| at element |index|, first offering the store to the
// object's indexed interceptor.  If the interceptor's setter handles the
// store (returns a non-empty result), the value is returned immediately;
// otherwise the store falls through to SetElementWithoutInterceptor.
MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
                                                 Object* value,
                                                 PropertyAttributes attributes,
                                                 StrictModeFlag strict_mode,
                                                 bool check_prototype,
                                                 SetPropertyMode set_mode) {
  Isolate* isolate = GetIsolate();
  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc;
  HandleScope scope(isolate);
  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
  // Handlify |this| and |value|: the interceptor callback can run arbitrary
  // code and therefore trigger GC, invalidating raw pointers.
  Handle<JSObject> this_handle(this);
  Handle<Object> value_handle(value, isolate);
  if (!interceptor->setter()->IsUndefined()) {
    v8::IndexedPropertySetter setter =
        v8::ToCData<v8::IndexedPropertySetter>(interceptor->setter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-set", this, index));
    PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
    v8::Handle<v8::Value> result =
        args.Call(setter, index, v8::Utils::ToLocal(value_handle));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    // A non-empty result means the interceptor intercepted the store.
    if (!result.IsEmpty()) return *value_handle;
  }
  // No setter, or the setter declined: perform the regular element store.
  MaybeObject* raw_result =
      this_handle->SetElementWithoutInterceptor(index,
                                                *value_handle,
                                                attributes,
                                                strict_mode,
                                                check_prototype,
                                                set_mode);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return raw_result;
}
11570
11571
// Loads the element at |index| through the accessor described by
// |structure|, which is one of:
//   - ExecutableAccessorInfo: an API-style getter callback,
//   - AccessorPair: a JS __defineGetter__-style getter,
//   - DeclaredAccessorInfo: a declared (declarative) accessor.
// |receiver| is the object the load was performed on; |holder| is the
// object on which the accessor was found.
MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
                                              Object* structure,
                                              uint32_t index,
                                              Object* holder) {
  Isolate* isolate = GetIsolate();
  ASSERT(!structure->IsForeign());

  // api style callbacks.
  if (structure->IsExecutableAccessorInfo()) {
    Handle<ExecutableAccessorInfo> data(
        ExecutableAccessorInfo::cast(structure));
    Object* fun_obj = data->getter();
    v8::AccessorGetter call_fun = v8::ToCData<v8::AccessorGetter>(fun_obj);
    // A NULL getter behaves as if the property were undefined.
    if (call_fun == NULL) return isolate->heap()->undefined_value();
    HandleScope scope(isolate);
    Handle<JSObject> self(JSObject::cast(receiver));
    Handle<JSObject> holder_handle(JSObject::cast(holder));
    // API getters are keyed by name, so convert the index to a string key.
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key = isolate->factory()->NumberToString(number);
    LOG(isolate, ApiNamedPropertyAccess("load", *self, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *self, *holder_handle);
    v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    // An empty result from the callback maps to undefined.
    if (result.IsEmpty()) return isolate->heap()->undefined_value();
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    return *result_internal;
  }

  // __defineGetter__ callback
  if (structure->IsAccessorPair()) {
    Object* getter = AccessorPair::cast(structure)->getter();
    if (getter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
    }
    // Getter is not a function.
    return isolate->heap()->undefined_value();
  }

  if (structure->IsDeclaredAccessorInfo()) {
    return GetDeclaredAccessorProperty(receiver,
                                       DeclaredAccessorInfo::cast(structure),
                                       isolate);
  }

  // All callback structure kinds are handled above.
  UNREACHABLE();
  return NULL;
}
11622
11623
// Stores |value| at element |index| through the accessor described by
// |structure| (ExecutableAccessorInfo, AccessorPair, or
// DeclaredAccessorInfo).  |holder| is the object on which the accessor was
// found.  In strict mode a missing setter raises a TypeError; in non-strict
// mode the store is silently ignored and the value is returned.
MaybeObject* JSObject::SetElementWithCallback(Object* structure,
                                              uint32_t index,
                                              Object* value,
                                              JSObject* holder,
                                              StrictModeFlag strict_mode) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());
  Handle<Object> value_handle(value, isolate);

  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks.  Eventually foreign
  // callbacks should be phased out.
  ASSERT(!structure->IsForeign());

  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    Handle<JSObject> self(this);
    Handle<JSObject> holder_handle(JSObject::cast(holder));
    Handle<ExecutableAccessorInfo> data(
        ExecutableAccessorInfo::cast(structure));
    Object* call_obj = data->setter();
    v8::AccessorSetter call_fun = v8::ToCData<v8::AccessorSetter>(call_obj);
    // A NULL setter silently absorbs the store.
    if (call_fun == NULL) return value;
    // API setters are keyed by name, so convert the index to a string key.
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key(isolate->factory()->NumberToString(number));
    LOG(isolate, ApiNamedPropertyAccess("store", *self, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *self, *holder_handle);
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value_handle));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    return *value_handle;
  }

  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(JSReceiver::cast(*setter), value);
    } else {
      // No setter: ignore in sloppy mode, throw a TypeError in strict mode.
      if (strict_mode == kNonStrictMode) {
        return value;
      }
      Handle<Object> holder_handle(holder, isolate);
      Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
      Handle<Object> args[2] = { key, holder_handle };
      return isolate->Throw(
          *isolate->factory()->NewTypeError("no_setter_in_callback",
                                            HandleVector(args, 2)));
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) return value;

  // All callback structure kinds are handled above.
  UNREACHABLE();
  return NULL;
}
11687
11688
11689bool JSObject::HasFastArgumentsElements() {
11690  Heap* heap = GetHeap();
11691  if (!elements()->IsFixedArray()) return false;
11692  FixedArray* elements = FixedArray::cast(this->elements());
11693  if (elements->map() != heap->non_strict_arguments_elements_map()) {
11694    return false;
11695  }
11696  FixedArray* arguments = FixedArray::cast(elements->get(1));
11697  return !arguments->IsDictionary();
11698}
11699
11700
11701bool JSObject::HasDictionaryArgumentsElements() {
11702  Heap* heap = GetHeap();
11703  if (!elements()->IsFixedArray()) return false;
11704  FixedArray* elements = FixedArray::cast(this->elements());
11705  if (elements->map() != heap->non_strict_arguments_elements_map()) {
11706    return false;
11707  }
11708  FixedArray* arguments = FixedArray::cast(elements->get(1));
11709  return arguments->IsDictionary();
11710}
11711
11712
11713// Adding n elements in fast case is O(n*n).
11714// Note: revisit design to have dual undefined values to capture absent
11715// elements.
11716MaybeObject* JSObject::SetFastElement(uint32_t index,
11717                                      Object* value,
11718                                      StrictModeFlag strict_mode,
11719                                      bool check_prototype) {
11720  ASSERT(HasFastSmiOrObjectElements() ||
11721         HasFastArgumentsElements());
11722
11723  // Array optimizations rely on the prototype lookups of Array objects always
11724  // returning undefined. If there is a store to the initial prototype object,
11725  // make sure all of these optimizations are invalidated.
11726  Isolate* isolate(GetIsolate());
11727  if (isolate->is_initial_object_prototype(this) ||
11728      isolate->is_initial_array_prototype(this)) {
11729    HandleScope scope(GetIsolate());
11730    map()->dependent_code()->DeoptimizeDependentCodeGroup(
11731        GetIsolate(),
11732        DependentCode::kElementsCantBeAddedGroup);
11733  }
11734
11735  FixedArray* backing_store = FixedArray::cast(elements());
11736  if (backing_store->map() == GetHeap()->non_strict_arguments_elements_map()) {
11737    backing_store = FixedArray::cast(backing_store->get(1));
11738  } else {
11739    MaybeObject* maybe = EnsureWritableFastElements();
11740    if (!maybe->To(&backing_store)) return maybe;
11741  }
11742  uint32_t capacity = static_cast<uint32_t>(backing_store->length());
11743
11744  if (check_prototype &&
11745      (index >= capacity || backing_store->get(index)->IsTheHole())) {
11746    bool found;
11747    MaybeObject* result = SetElementWithCallbackSetterInPrototypes(index,
11748                                                                   value,
11749                                                                   &found,
11750                                                                   strict_mode);
11751    if (found) return result;
11752  }
11753
11754  uint32_t new_capacity = capacity;
11755  // Check if the length property of this object needs to be updated.
11756  uint32_t array_length = 0;
11757  bool must_update_array_length = false;
11758  bool introduces_holes = true;
11759  if (IsJSArray()) {
11760    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_length));
11761    introduces_holes = index > array_length;
11762    if (index >= array_length) {
11763      must_update_array_length = true;
11764      array_length = index + 1;
11765    }
11766  } else {
11767    introduces_holes = index >= capacity;
11768  }
11769
11770  // If the array is growing, and it's not growth by a single element at the
11771  // end, make sure that the ElementsKind is HOLEY.
11772  ElementsKind elements_kind = GetElementsKind();
11773  if (introduces_holes &&
11774      IsFastElementsKind(elements_kind) &&
11775      !IsFastHoleyElementsKind(elements_kind)) {
11776    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
11777    MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
11778    if (maybe->IsFailure()) return maybe;
11779  }
11780
11781  // Check if the capacity of the backing store needs to be increased, or if
11782  // a transition to slow elements is necessary.
11783  if (index >= capacity) {
11784    bool convert_to_slow = true;
11785    if ((index - capacity) < kMaxGap) {
11786      new_capacity = NewElementsCapacity(index + 1);
11787      ASSERT(new_capacity > index);
11788      if (!ShouldConvertToSlowElements(new_capacity)) {
11789        convert_to_slow = false;
11790      }
11791    }
11792    if (convert_to_slow) {
11793      MaybeObject* result = NormalizeElements();
11794      if (result->IsFailure()) return result;
11795      return SetDictionaryElement(index, value, NONE, strict_mode,
11796                                  check_prototype);
11797    }
11798  }
11799  // Convert to fast double elements if appropriate.
11800  if (HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
11801    // Consider fixing the boilerplate as well if we have one.
11802    ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
11803        ? FAST_HOLEY_DOUBLE_ELEMENTS
11804        : FAST_DOUBLE_ELEMENTS;
11805
11806    MaybeObject* maybe_failure = UpdateAllocationSite(to_kind);
11807    if (maybe_failure->IsFailure()) return maybe_failure;
11808
11809    MaybeObject* maybe =
11810        SetFastDoubleElementsCapacityAndLength(new_capacity, array_length);
11811    if (maybe->IsFailure()) return maybe;
11812    FixedDoubleArray::cast(elements())->set(index, value->Number());
11813    ValidateElements();
11814    return value;
11815  }
11816  // Change elements kind from Smi-only to generic FAST if necessary.
11817  if (HasFastSmiElements() && !value->IsSmi()) {
11818    Map* new_map;
11819    ElementsKind kind = HasFastHoleyElements()
11820        ? FAST_HOLEY_ELEMENTS
11821        : FAST_ELEMENTS;
11822
11823    MaybeObject* maybe_failure = UpdateAllocationSite(kind);
11824    if (maybe_failure->IsFailure()) return maybe_failure;
11825
11826    MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
11827                                                          kind);
11828    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
11829
11830    set_map(new_map);
11831  }
11832  // Increase backing store capacity if that's been decided previously.
11833  if (new_capacity != capacity) {
11834    FixedArray* new_elements;
11835    SetFastElementsCapacitySmiMode smi_mode =
11836        value->IsSmi() && HasFastSmiElements()
11837            ? kAllowSmiElements
11838            : kDontAllowSmiElements;
11839    { MaybeObject* maybe =
11840          SetFastElementsCapacityAndLength(new_capacity,
11841                                           array_length,
11842                                           smi_mode);
11843      if (!maybe->To(&new_elements)) return maybe;
11844    }
11845    new_elements->set(index, value);
11846    ValidateElements();
11847    return value;
11848  }
11849
11850  // Finally, set the new element and length.
11851  ASSERT(elements()->IsFixedArray());
11852  backing_store->set(index, value);
11853  if (must_update_array_length) {
11854    JSArray::cast(this)->set_length(Smi::FromInt(array_length));
11855  }
11856  return value;
11857}
11858
11859
// Stores |value_raw| at |index| in dictionary-mode elements (or
// dictionary-mode arguments elements).  Handles callback properties,
// read-only checks, aliased arguments entries, non-extensible objects,
// array length updates, and an opportunistic conversion back to fast
// elements.  Returns the stored value on success or a failure object.
MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
                                            Object* value_raw,
                                            PropertyAttributes attributes,
                                            StrictModeFlag strict_mode,
                                            bool check_prototype,
                                            SetPropertyMode set_mode) {
  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
  Isolate* isolate = GetIsolate();
  Heap* heap = isolate->heap();
  // Handlify; the operations below can allocate and thus trigger GC.
  Handle<JSObject> self(this);
  Handle<Object> value(value_raw, isolate);

  // Insert element in the dictionary.
  Handle<FixedArray> elements(FixedArray::cast(this->elements()));
  bool is_arguments =
      (elements->map() == heap->non_strict_arguments_elements_map());
  // For arguments objects the dictionary lives in slot 1 of the store.
  Handle<SeededNumberDictionary> dictionary(is_arguments
    ? SeededNumberDictionary::cast(elements->get(1))
    : SeededNumberDictionary::cast(*elements));

  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    // The element already exists: update it in place.
    Object* element = dictionary->ValueAt(entry);
    PropertyDetails details = dictionary->DetailsAt(entry);
    if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
      return SetElementWithCallback(element, index, *value, this, strict_mode);
    } else {
      dictionary->UpdateMaxNumberKey(index);
      // If a value has not been initialized we allow writing to it even if it
      // is read-only (a declared const that has not been initialized).  If a
      // value is being defined we skip attribute checks completely.
      if (set_mode == DEFINE_PROPERTY) {
        details = PropertyDetails(
            attributes, NORMAL, details.dictionary_index());
        dictionary->DetailsAtPut(entry, details);
      } else if (details.IsReadOnly() && !element->IsTheHole()) {
        // Read-only violation: silent in sloppy mode, TypeError in strict.
        if (strict_mode == kNonStrictMode) {
          return isolate->heap()->undefined_value();
        } else {
          Handle<Object> holder(this, isolate);
          Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
          Handle<Object> args[2] = { number, holder };
          Handle<Object> error =
              isolate->factory()->NewTypeError("strict_read_only_property",
                                               HandleVector(args, 2));
          return isolate->Throw(*error);
        }
      }
      // Elements of the arguments object in slow mode might be slow aliases.
      if (is_arguments && element->IsAliasedArgumentsEntry()) {
        AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(element);
        Context* context = Context::cast(elements->get(0));
        int context_index = entry->aliased_context_slot();
        ASSERT(!context->get(context_index)->IsTheHole());
        // Aliased entries write through to the context slot.
        context->set(context_index, *value);
        // For elements that are still writable we keep slow aliasing.
        if (!details.IsReadOnly()) value = handle(element, isolate);
      }
      dictionary->ValueAtPut(entry, *value);
    }
  } else {
    // Index not already used. Look for an accessor in the prototype chain.
    // Can cause GC!
    if (check_prototype) {
      bool found;
      MaybeObject* result = SetElementWithCallbackSetterInPrototypes(
          index, *value, &found, strict_mode);
      if (found) return result;
    }
    // When we set the is_extensible flag to false we always force the
    // element into dictionary mode (and force them to stay there).
    if (!self->map()->is_extensible()) {
      if (strict_mode == kNonStrictMode) {
        return isolate->heap()->undefined_value();
      } else {
        Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
        Handle<String> name = isolate->factory()->NumberToString(number);
        Handle<Object> args[1] = { name };
        Handle<Object> error =
            isolate->factory()->NewTypeError("object_not_extensible",
                                             HandleVector(args, 1));
        return isolate->Throw(*error);
      }
    }
    FixedArrayBase* new_dictionary;
    PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
    MaybeObject* maybe = dictionary->AddNumberEntry(index, *value, details);
    if (!maybe->To(&new_dictionary)) return maybe;
    // Adding an entry may have reallocated the dictionary; re-link it.
    if (*dictionary != SeededNumberDictionary::cast(new_dictionary)) {
      if (is_arguments) {
        elements->set(1, new_dictionary);
      } else {
        self->set_elements(new_dictionary);
      }
      dictionary =
          handle(SeededNumberDictionary::cast(new_dictionary), isolate);
    }
  }

  // Update the array length if this JSObject is an array.
  if (self->IsJSArray()) {
    MaybeObject* result =
        JSArray::cast(*self)->JSArrayUpdateLengthFromIndex(index, *value);
    if (result->IsFailure()) return result;
  }

  // Attempt to put this object back in fast case.
  if (self->ShouldConvertToFastElements()) {
    uint32_t new_length = 0;
    if (self->IsJSArray()) {
      CHECK(JSArray::cast(*self)->length()->ToArrayIndex(&new_length));
    } else {
      new_length = dictionary->max_number_key() + 1;
    }
    SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
        ? kAllowSmiElements
        : kDontAllowSmiElements;
    bool has_smi_only_elements = false;
    bool should_convert_to_fast_double_elements =
        self->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
    if (has_smi_only_elements) {
      smi_mode = kForceSmiElements;
    }
    MaybeObject* result = should_convert_to_fast_double_elements
        ? self->SetFastDoubleElementsCapacityAndLength(new_length, new_length)
        : self->SetFastElementsCapacityAndLength(
            new_length, new_length, smi_mode);
    self->ValidateElements();
    if (result->IsFailure()) return result;
#ifdef DEBUG
    if (FLAG_trace_normalization) {
      PrintF("Object elements are fast case again:\n");
      Print();
    }
#endif
  }
  return *value;
}
11998
11999
// Stores |value| at |index| in a FAST_DOUBLE_ELEMENTS backing store.
// Non-number values force a transition back to object elements; stores
// past the current capacity grow the store (within kMaxGap) or fall back
// to dictionary elements.  Returns |value| on success or a failure object.
MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
    uint32_t index,
    Object* value,
    StrictModeFlag strict_mode,
    bool check_prototype) {
  ASSERT(HasFastDoubleElements());

  FixedArrayBase* base_elms = FixedArrayBase::cast(elements());
  uint32_t elms_length = static_cast<uint32_t>(base_elms->length());

  // If storing to an element that isn't in the array, pass the store request
  // up the prototype chain before storing in the receiver's elements.
  if (check_prototype &&
      (index >= elms_length ||
       FixedDoubleArray::cast(base_elms)->is_the_hole(index))) {
    bool found;
    MaybeObject* result = SetElementWithCallbackSetterInPrototypes(index,
                                                                   value,
                                                                   &found,
                                                                   strict_mode);
    if (found) return result;
  }

  // If the value object is not a heap number, switch to fast elements and try
  // again.
  bool value_is_smi = value->IsSmi();
  bool introduces_holes = true;
  uint32_t length = elms_length;
  if (IsJSArray()) {
    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
    // Writing one past the end (index == length) does not create a hole.
    introduces_holes = index > length;
  } else {
    introduces_holes = index >= elms_length;
  }

  if (!value->IsNumber()) {
    // Transition to object elements, then retry via SetFastElement.
    MaybeObject* maybe_obj = SetFastElementsCapacityAndLength(
        elms_length,
        length,
        kDontAllowSmiElements);
    if (maybe_obj->IsFailure()) return maybe_obj;
    maybe_obj = SetFastElement(index, value, strict_mode, check_prototype);
    if (maybe_obj->IsFailure()) return maybe_obj;
    ValidateElements();
    return maybe_obj;
  }

  double double_value = value_is_smi
      ? static_cast<double>(Smi::cast(value)->value())
      : HeapNumber::cast(value)->value();

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = GetElementsKind();
  if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
    if (maybe->IsFailure()) return maybe;
  }

  // Check whether there is extra space in the fixed array.
  if (index < elms_length) {
    FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
    elms->set(index, double_value);
    if (IsJSArray()) {
      // Update the length of the array if needed.
      uint32_t array_length = 0;
      CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_length));
      if (index >= array_length) {
        JSArray::cast(this)->set_length(Smi::FromInt(index + 1));
      }
    }
    return value;
  }

  // Allow gap in fast case.
  if ((index - elms_length) < kMaxGap) {
    // Try allocating extra space.
    int new_capacity = NewElementsCapacity(index+1);
    if (!ShouldConvertToSlowElements(new_capacity)) {
      ASSERT(static_cast<uint32_t>(new_capacity) > index);
      MaybeObject* maybe_obj =
          SetFastDoubleElementsCapacityAndLength(new_capacity, index + 1);
      if (maybe_obj->IsFailure()) return maybe_obj;
      FixedDoubleArray::cast(elements())->set(index, double_value);
      ValidateElements();
      return value;
    }
  }

  // Otherwise default to slow case.
  ASSERT(HasFastDoubleElements());
  ASSERT(map()->has_fast_double_elements());
  ASSERT(elements()->IsFixedDoubleArray());
  Object* obj;
  { MaybeObject* maybe_obj = NormalizeElements();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  ASSERT(HasDictionaryElements());
  return SetElement(index, value, NONE, strict_mode, check_prototype);
}
12101
12102
12103MaybeObject* JSReceiver::SetElement(uint32_t index,
12104                                    Object* value,
12105                                    PropertyAttributes attributes,
12106                                    StrictModeFlag strict_mode,
12107                                    bool check_proto) {
12108  if (IsJSProxy()) {
12109    return JSProxy::cast(this)->SetElementWithHandler(
12110        this, index, value, strict_mode);
12111  } else {
12112    return JSObject::cast(this)->SetElement(
12113        index, value, attributes, strict_mode, check_proto);
12114  }
12115}
12116
12117
// Handlified element store that writes directly to |object| without
// consulting the prototype chain (check_prototype == false) and without
// attributes.  Retries on allocation failure via CALL_HEAP_FUNCTION.
Handle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
                                       uint32_t index,
                                       Handle<Object> value,
                                       StrictModeFlag strict_mode) {
  ASSERT(!object->HasExternalArrayElements());
  CALL_HEAP_FUNCTION(
      object->GetIsolate(),
      object->SetElement(index, *value, NONE, strict_mode, false),
      Object);
}
12128
12129
12130Handle<Object> JSObject::SetElement(Handle<JSObject> object,
12131                                    uint32_t index,
12132                                    Handle<Object> value,
12133                                    PropertyAttributes attr,
12134                                    StrictModeFlag strict_mode,
12135                                    SetPropertyMode set_mode) {
12136  if (object->HasExternalArrayElements()) {
12137    if (!value->IsNumber() && !value->IsUndefined()) {
12138      bool has_exception;
12139      Handle<Object> number = Execution::ToNumber(value, &has_exception);
12140      if (has_exception) return Handle<Object>();
12141      value = number;
12142    }
12143  }
12144  CALL_HEAP_FUNCTION(
12145      object->GetIsolate(),
12146      object->SetElement(index, *value, attr, strict_mode, true, set_mode),
12147      Object);
12148}
12149
12150
// Top-level raw element store.  Performs access checks, forwards global
// proxies to the global object, rejects redefinition of external-array
// elements, normalizes elements when attributes are present, and — when
// the object is observed (Object.observe) — records "new"/"updated"/
// "reconfigured" change records and splice records around the actual store.
MaybeObject* JSObject::SetElement(uint32_t index,
                                  Object* value_raw,
                                  PropertyAttributes attributes,
                                  StrictModeFlag strict_mode,
                                  bool check_prototype,
                                  SetPropertyMode set_mode) {
  Isolate* isolate = GetIsolate();

  // Check access rights if needed.
  if (IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_SET)) {
      isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
      RETURN_IF_SCHEDULED_EXCEPTION(isolate);
      // Failed access check is not an error: the store is just ignored.
      return value_raw;
    }
  }

  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return value_raw;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->SetElement(index,
                                             value_raw,
                                             attributes,
                                             strict_mode,
                                             check_prototype,
                                             set_mode);
  }

  // Don't allow element properties to be redefined for external arrays.
  if (HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) {
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<Object> args[] = { handle(this, isolate), number };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw(*error);
  }

  // Normalize the elements to enable attributes on the property.
  if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
    SeededNumberDictionary* dictionary;
    MaybeObject* maybe_object = NormalizeElements();
    if (!maybe_object->To(&dictionary)) return maybe_object;
    // Make sure that we never go back to fast case.
    dictionary->set_requires_slow_elements();
  }

  // Fast path: unobserved objects need no change records.
  if (!(FLAG_harmony_observation && map()->is_observed())) {
    return HasIndexedInterceptor()
      ? SetElementWithInterceptor(
          index, value_raw, attributes, strict_mode, check_prototype, set_mode)
      : SetElementWithoutInterceptor(
          index, value_raw, attributes, strict_mode, check_prototype, set_mode);
  }

  // From here on, everything has to be handlified.
  Handle<JSObject> self(this);
  Handle<Object> value(value_raw, isolate);
  PropertyAttributes old_attributes = self->GetLocalElementAttribute(index);
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  Handle<Object> old_length_handle;
  Handle<Object> new_length_handle;

  // Capture the pre-store state for change-record comparison.
  if (old_attributes != ABSENT) {
    if (self->GetLocalElementAccessorPair(index) == NULL)
      old_value = Object::GetElement(self, index);
  } else if (self->IsJSArray()) {
    // Store old array length in case adding an element grows the array.
    old_length_handle = handle(Handle<JSArray>::cast(self)->length(), isolate);
  }

  // Check for lookup interceptor
  MaybeObject* result = self->HasIndexedInterceptor()
    ? self->SetElementWithInterceptor(
        index, *value, attributes, strict_mode, check_prototype, set_mode)
    : self->SetElementWithoutInterceptor(
        index, *value, attributes, strict_mode, check_prototype, set_mode);

  Handle<Object> hresult;
  if (!result->ToHandle(&hresult, isolate)) return result;

  // Compare post-store state with the captured pre-store state and enqueue
  // the appropriate Object.observe change records.
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  PropertyAttributes new_attributes = self->GetLocalElementAttribute(index);
  if (old_attributes == ABSENT) {
    if (self->IsJSArray() &&
        !old_length_handle->SameValue(Handle<JSArray>::cast(self)->length())) {
      // The store grew the array: record the new element, the length
      // update, and a splice covering the added range.
      new_length_handle = handle(Handle<JSArray>::cast(self)->length(),
                                 isolate);
      uint32_t old_length = 0;
      uint32_t new_length = 0;
      CHECK(old_length_handle->ToArrayIndex(&old_length));
      CHECK(new_length_handle->ToArrayIndex(&new_length));

      BeginPerformSplice(Handle<JSArray>::cast(self));
      EnqueueChangeRecord(self, "new", name, old_value);
      EnqueueChangeRecord(self, "updated", isolate->factory()->length_string(),
                          old_length_handle);
      EndPerformSplice(Handle<JSArray>::cast(self));
      Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
      EnqueueSpliceRecord(Handle<JSArray>::cast(self), old_length, deleted,
                          new_length - old_length);
    } else {
      EnqueueChangeRecord(self, "new", name, old_value);
    }
  } else if (old_value->IsTheHole()) {
    // Existing accessor element (old_value was never read): reconfigured.
    EnqueueChangeRecord(self, "reconfigured", name, old_value);
  } else {
    Handle<Object> new_value = Object::GetElement(self, index);
    bool value_changed = !old_value->SameValue(*new_value);
    if (old_attributes != new_attributes) {
      if (!value_changed) old_value = isolate->factory()->the_hole_value();
      EnqueueChangeRecord(self, "reconfigured", name, old_value);
    } else if (value_changed) {
      EnqueueChangeRecord(self, "updated", name, old_value);
    }
  }

  return *hresult;
}
12270
12271
// Stores |value| at element |index|, bypassing any indexed interceptor, by
// dispatching on the receiver's current elements kind.  Returns the stored
// value on success or a Failure if an allocation was required and failed.
// |attr| and |set_mode| are only honored on the dictionary-backed (and
// aliased-arguments) paths; the fast paths require trivial attributes.
MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
                                                    Object* value,
                                                    PropertyAttributes attr,
                                                    StrictModeFlag strict_mode,
                                                    bool check_prototype,
                                                    SetPropertyMode set_mode) {
  // Non-default attributes can only be represented by a dictionary backing
  // store, so every other elements kind must be storing plain properties.
  ASSERT(HasDictionaryElements() ||
         HasDictionaryArgumentsElements() ||
         (attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
  Isolate* isolate = GetIsolate();
  // Optional debug tracing of element writes that may abuse array bounds.
  if (FLAG_trace_external_array_abuse &&
      IsExternalArrayElementsKind(GetElementsKind())) {
    CheckArrayAbuse(this, "external elements write", index);
  }
  if (FLAG_trace_js_array_abuse &&
      !IsExternalArrayElementsKind(GetElementsKind())) {
    if (IsJSArray()) {
      CheckArrayAbuse(this, "elements write", index, true);
    }
  }
  switch (GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      return SetFastElement(index, value, strict_mode, check_prototype);
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      return SetFastDoubleElement(index, value, strict_mode, check_prototype);
    // External (typed) arrays: delegate to the backing store, which performs
    // the kind-specific value conversion on write.
    case EXTERNAL_PIXEL_ELEMENTS: {
      ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
      return pixels->SetValue(index, value);
    }
    case EXTERNAL_BYTE_ELEMENTS: {
      ExternalByteArray* array = ExternalByteArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: {
      ExternalUnsignedByteArray* array =
          ExternalUnsignedByteArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_SHORT_ELEMENTS: {
      ExternalShortArray* array = ExternalShortArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: {
      ExternalUnsignedShortArray* array =
          ExternalUnsignedShortArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_INT_ELEMENTS: {
      ExternalIntArray* array = ExternalIntArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
      ExternalUnsignedIntArray* array =
          ExternalUnsignedIntArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_FLOAT_ELEMENTS: {
      ExternalFloatArray* array = ExternalFloatArray::cast(elements());
      return array->SetValue(index, value);
    }
    case EXTERNAL_DOUBLE_ELEMENTS: {
      ExternalDoubleArray* array = ExternalDoubleArray::cast(elements());
      return array->SetValue(index, value);
    }
    case DICTIONARY_ELEMENTS:
      return SetDictionaryElement(index, value, attr, strict_mode,
                                  check_prototype, set_mode);
    case NON_STRICT_ARGUMENTS_ELEMENTS: {
      // Layout of the parameter map: slot 0 is the context, slot 1 is the
      // unmapped arguments store, slots 2.. map indices to context slots.
      FixedArray* parameter_map = FixedArray::cast(elements());
      uint32_t length = parameter_map->length();
      Object* probe =
          (index < length - 2) ? parameter_map->get(index + 2) : NULL;
      if (probe != NULL && !probe->IsTheHole()) {
        // The element is aliased to a context slot: write through to it.
        Context* context = Context::cast(parameter_map->get(0));
        int context_index = Smi::cast(probe)->value();
        ASSERT(!context->get(context_index)->IsTheHole());
        context->set(context_index, value);
        // Redefining attributes of an aliased element destroys fast aliasing.
        if (set_mode == SET_PROPERTY || attr == NONE) return value;
        parameter_map->set_the_hole(index + 2);
        // For elements that are still writable we re-establish slow aliasing.
        if ((attr & READ_ONLY) == 0) {
          MaybeObject* maybe_entry =
              isolate->heap()->AllocateAliasedArgumentsEntry(context_index);
          if (!maybe_entry->ToObject(&value)) return maybe_entry;
        }
      }
      // Store into the unmapped arguments backing store, which may itself
      // be fast or dictionary-backed.
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      if (arguments->IsDictionary()) {
        return SetDictionaryElement(index, value, attr, strict_mode,
                                    check_prototype, set_mode);
      } else {
        return SetFastElement(index, value, strict_mode, check_prototype);
      }
    }
  }
  // All possible cases have been handled above. Add a return to avoid the
  // complaints from the compiler.
  UNREACHABLE();
  return isolate->heap()->null_value();
}
12377
12378
// Handlified front-end for the raw TransitionElementsKind below:
// CALL_HEAP_FUNCTION performs the raw call and converts its MaybeObject
// result into a Handle<Object>.
Handle<Object> JSObject::TransitionElementsKind(Handle<JSObject> object,
                                                ElementsKind to_kind) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->TransitionElementsKind(to_kind),
                     Object);
}
12385
12386
// Propagates an elements-kind transition of this array back to the
// AllocationSite it was allocated from, so that future allocations from the
// same site start out in the more general kind.  Returns |this| (or the
// result of transitioning the site's boilerplate), or a Failure if a
// required allocation fails.
MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
  // Only JSArrays participate, and only when site tracking is enabled.
  if (!FLAG_track_allocation_sites || !IsJSArray()) {
    return this;
  }

  // Locate the allocation memento for this object; without a valid memento
  // there is no site to update.
  AllocationMemento* memento = AllocationMemento::FindForJSObject(this);
  if (memento == NULL || !memento->IsValid()) {
    return this;
  }

  // Walk through to the Allocation Site
  AllocationSite* site = memento->GetAllocationSite();
  if (site->IsLiteralSite()) {
    // Literal site: transition_info holds the boilerplate array itself.
    JSArray* transition_info = JSArray::cast(site->transition_info());
    ElementsKind kind = transition_info->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (AllocationSite::GetMode(kind, to_kind) == TRACK_ALLOCATION_SITE) {
      // If the array is huge, it's not likely to be defined in a local
      // function, so we shouldn't make new instances of it very often.
      uint32_t length = 0;
      CHECK(transition_info->length()->ToArrayIndex(&length));
      if (length <= AllocationSite::kMaximumArrayBytesToPretransition) {
        if (FLAG_trace_track_allocation_sites) {
          PrintF(
              "AllocationSite: JSArray %p boilerplate updated %s->%s\n",
              reinterpret_cast<void*>(this),
              ElementsKindToString(kind),
              ElementsKindToString(to_kind));
        }
        // Transition the boilerplate itself so clones start in the new kind.
        return transition_info->TransitionElementsKind(to_kind);
      }
    }
  } else {
    // Non-literal site: the elements kind is stored directly as a Smi.
    ElementsKind kind = site->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (AllocationSite::GetMode(kind, to_kind) == TRACK_ALLOCATION_SITE) {
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
               reinterpret_cast<void*>(this),
               ElementsKindToString(kind),
               ElementsKindToString(to_kind));
      }
      site->set_transition_info(Smi::FromInt(to_kind));
    }
  }
  return this;
}
12440
12441
// Transitions this object's elements kind to |to_kind| (upgraded to the
// holey variant if the current kind is holey).  Handles the map-only cases
// plus the two copying cases (smi -> double and double -> object); any other
// combination is unreachable here.  Returns |this|, or a Failure if an
// allocation fails.
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
  // Observed objects must take the observation-aware paths instead.
  ASSERT(!map()->is_observed());
  ElementsKind from_kind = map()->elements_kind();

  // Holeyness is sticky: never transition from a holey kind to packed.
  if (IsFastHoleyElementsKind(from_kind)) {
    to_kind = GetHoleyElementsKind(to_kind);
  }

  if (from_kind == to_kind) return this;

  // Let the allocation site (if any) learn about the transition first.
  MaybeObject* maybe_failure = UpdateAllocationSite(to_kind);
  if (maybe_failure->IsFailure()) return maybe_failure;

  Isolate* isolate = GetIsolate();
  if (elements() == isolate->heap()->empty_fixed_array() ||
      (IsFastSmiOrObjectElementsKind(from_kind) &&
       IsFastSmiOrObjectElementsKind(to_kind)) ||
      (from_kind == FAST_DOUBLE_ELEMENTS &&
       to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
    ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
    // No change is needed to the elements() buffer, the transition
    // only requires a map change.
    MaybeObject* maybe_new_map = GetElementsTransitionMap(isolate, to_kind);
    Map* new_map;
    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
    set_map(new_map);
    if (FLAG_trace_elements_transitions) {
      FixedArrayBase* elms = FixedArrayBase::cast(elements());
      PrintElementsTransition(stdout, from_kind, elms, to_kind, elms);
    }
    return this;
  }

  // Copying transition: figure out the capacity and logical length of the
  // current backing store.
  FixedArrayBase* elms = FixedArrayBase::cast(elements());
  uint32_t capacity = static_cast<uint32_t>(elms->length());
  uint32_t length = capacity;

  if (IsJSArray()) {
    Object* raw_length = JSArray::cast(this)->length();
    if (raw_length->IsUndefined()) {
      // If length is undefined, then JSArray is being initialized and has no
      // elements, assume a length of zero.
      length = 0;
    } else {
      CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
    }
  }

  // Smi -> double: box the elements into a new FixedDoubleArray.
  if (IsFastSmiElementsKind(from_kind) &&
      IsFastDoubleElementsKind(to_kind)) {
    MaybeObject* maybe_result =
        SetFastDoubleElementsCapacityAndLength(capacity, length);
    if (maybe_result->IsFailure()) return maybe_result;
    ValidateElements();
    return this;
  }

  // Double -> object: unbox the doubles into a new FixedArray of HeapNumbers
  // (smi-only is explicitly disallowed for the result).
  if (IsFastDoubleElementsKind(from_kind) &&
      IsFastObjectElementsKind(to_kind)) {
    MaybeObject* maybe_result = SetFastElementsCapacityAndLength(
        capacity, length, kDontAllowSmiElements);
    if (maybe_result->IsFailure()) return maybe_result;
    ValidateElements();
    return this;
  }

  // This method should never be called for any other case than the ones
  // handled above.
  UNREACHABLE();
  return GetIsolate()->heap()->null_value();
}
12513
12514
12515// static
12516bool Map::IsValidElementsTransition(ElementsKind from_kind,
12517                                    ElementsKind to_kind) {
12518  // Transitions can't go backwards.
12519  if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
12520    return false;
12521  }
12522
12523  // Transitions from HOLEY -> PACKED are not allowed.
12524  return !IsFastHoleyElementsKind(from_kind) ||
12525      IsFastHoleyElementsKind(to_kind);
12526}
12527
12528
12529MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
12530                                                   Object* value) {
12531  uint32_t old_len = 0;
12532  CHECK(length()->ToArrayIndex(&old_len));
12533  // Check to see if we need to update the length. For now, we make
12534  // sure that the length stays within 32-bits (unsigned).
12535  if (index >= old_len && index != 0xffffffff) {
12536    Object* len;
12537    { MaybeObject* maybe_len =
12538          GetHeap()->NumberFromDouble(static_cast<double>(index) + 1);
12539      if (!maybe_len->ToObject(&len)) return maybe_len;
12540    }
12541    set_length(len);
12542  }
12543  return value;
12544}
12545
12546
// Loads element |index| by first consulting this object's indexed
// interceptor.  If the interceptor getter declines to supply a value, falls
// back to the real elements of the holder and then to the prototype chain.
MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
                                                 uint32_t index) {
  Isolate* isolate = GetIsolate();
  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc;
  HandleScope scope(isolate);
  // Handlify everything before calling into the embedder, which may GC.
  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor(), isolate);
  Handle<Object> this_handle(receiver, isolate);
  Handle<JSObject> holder_handle(this, isolate);
  if (!interceptor->getter()->IsUndefined()) {
    v8::IndexedPropertyGetter getter =
        v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-get", this, index));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), receiver, this);
    v8::Handle<v8::Value> result = args.Call(getter, index);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (!result.IsEmpty()) {
      // The embedder supplied a value; sanity-check and return it.
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      return *result_internal;
    }
  }

  // The interceptor did not intercept: consult the holder's own elements.
  Heap* heap = holder_handle->GetHeap();
  ElementsAccessor* handler = holder_handle->GetElementsAccessor();
  MaybeObject* raw_result = handler->Get(*this_handle,
                                         *holder_handle,
                                         index);
  if (raw_result != heap->the_hole_value()) return raw_result;

  RETURN_IF_SCHEDULED_EXCEPTION(isolate);

  // Not present on the holder either; continue up the prototype chain.
  Object* pt = holder_handle->GetPrototype();
  if (pt == heap->null_value()) return heap->undefined_value();
  return pt->GetElementWithReceiver(*this_handle, index);
}
12586
12587
12588bool JSObject::HasDenseElements() {
12589  int capacity = 0;
12590  int used = 0;
12591  GetElementsCapacityAndUsage(&capacity, &used);
12592  return (capacity == 0) || (used > (capacity / 2));
12593}
12594
12595
// Computes the backing-store capacity and the number of slots actually in
// use for this object's elements.  Packed-kind JSArrays report their length
// as the usage; holey kinds are scanned for holes; dictionaries report their
// element count; external arrays are considered fully used.
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
  *capacity = 0;
  *used = 0;

  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
  FixedArray* backing_store = NULL;
  switch (GetElementsKind()) {
    case NON_STRICT_ARGUMENTS_ELEMENTS:
      // The real backing store is slot 1 of the parameter map; it may be a
      // dictionary or a plain FixedArray.
      backing_store_base =
          FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
      backing_store = FixedArray::cast(backing_store_base);
      if (backing_store->IsDictionary()) {
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(backing_store);
        *capacity = dictionary->Capacity();
        *used = dictionary->NumberOfElements();
        break;
      }
      // Fall through.
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
      if (IsJSArray()) {
        // Packed JSArray: every slot up to length is in use.
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      // Count the non-hole slots explicitly.
      backing_store = FixedArray::cast(backing_store_base);
      *capacity = backing_store->length();
      for (int i = 0; i < *capacity; ++i) {
        if (!backing_store->get(i)->IsTheHole()) ++(*used);
      }
      break;
    case DICTIONARY_ELEMENTS: {
      SeededNumberDictionary* dictionary =
          SeededNumberDictionary::cast(FixedArray::cast(elements()));
      *capacity = dictionary->Capacity();
      *used = dictionary->NumberOfElements();
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
      if (IsJSArray()) {
        // Packed double JSArray: every slot up to length is in use.
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
      *capacity = elms->length();
      for (int i = 0; i < *capacity; i++) {
        if (!elms->is_the_hole(i)) ++(*used);
      }
      break;
    }
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS:
    case EXTERNAL_PIXEL_ELEMENTS:
      // External arrays are considered 100% used.
      ExternalArray* external_array = ExternalArray::cast(elements());
      *capacity = external_array->length();
      *used = external_array->length();
      break;
  }
}
12669
12670
12671bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
12672  STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
12673                kMaxUncheckedFastElementsLength);
12674  if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
12675      (new_capacity <= kMaxUncheckedFastElementsLength &&
12676       GetHeap()->InNewSpace(this))) {
12677    return false;
12678  }
12679  // If the fast-case backing storage takes up roughly three times as
12680  // much space (in machine words) as a dictionary backing storage
12681  // would, the object should have slow elements.
12682  int old_capacity = 0;
12683  int used_elements = 0;
12684  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
12685  int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
12686      SeededNumberDictionary::kEntrySize;
12687  return 3 * dictionary_size <= new_capacity;
12688}
12689
12690
12691bool JSObject::ShouldConvertToFastElements() {
12692  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
12693  // If the elements are sparse, we should not go back to fast case.
12694  if (!HasDenseElements()) return false;
12695  // An object requiring access checks is never allowed to have fast
12696  // elements.  If it had fast elements we would skip security checks.
12697  if (IsAccessCheckNeeded()) return false;
12698  // Observed objects may not go to fast mode because they rely on map checks,
12699  // and for fast element accesses we sometimes check element kinds only.
12700  if (FLAG_harmony_observation && map()->is_observed()) return false;
12701
12702  FixedArray* elements = FixedArray::cast(this->elements());
12703  SeededNumberDictionary* dictionary = NULL;
12704  if (elements->map() == GetHeap()->non_strict_arguments_elements_map()) {
12705    dictionary = SeededNumberDictionary::cast(elements->get(1));
12706  } else {
12707    dictionary = SeededNumberDictionary::cast(elements);
12708  }
12709  // If an element has been added at a very high index in the elements
12710  // dictionary, we cannot go back to fast case.
12711  if (dictionary->requires_slow_elements()) return false;
12712  // If the dictionary backing storage takes up roughly half as much
12713  // space (in machine words) as a fast-case backing storage would,
12714  // the object should have fast elements.
12715  uint32_t array_size = 0;
12716  if (IsJSArray()) {
12717    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
12718  } else {
12719    array_size = dictionary->max_number_key();
12720  }
12721  uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
12722      SeededNumberDictionary::kEntrySize;
12723  return 2 * dictionary_size >= array_size;
12724}
12725
12726
12727bool JSObject::ShouldConvertToFastDoubleElements(
12728    bool* has_smi_only_elements) {
12729  *has_smi_only_elements = false;
12730  if (FLAG_unbox_double_arrays) {
12731    ASSERT(HasDictionaryElements());
12732    SeededNumberDictionary* dictionary =
12733        SeededNumberDictionary::cast(elements());
12734    bool found_double = false;
12735    for (int i = 0; i < dictionary->Capacity(); i++) {
12736      Object* key = dictionary->KeyAt(i);
12737      if (key->IsNumber()) {
12738        Object* value = dictionary->ValueAt(i);
12739        if (!value->IsNumber()) return false;
12740        if (!value->IsSmi()) {
12741          found_double = true;
12742        }
12743      }
12744    }
12745    *has_smi_only_elements = !found_double;
12746    return found_double;
12747  } else {
12748    return false;
12749  }
12750}
12751
12752
// Certain compilers request function template instantiation when they
// see the definition of the other template functions in the
// class. This requires us to have the template functions put
// together, so even though this function belongs in objects-debug.cc,
// we keep it here instead to satisfy certain compilers.
#ifdef OBJECT_PRINT
// Prints every key/value pair of the dictionary to |out|, one per line, in
// hash-table (not key) order.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::Print(FILE* out) {
  int capacity = HashTable<Shape, Key>::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = HashTable<Shape, Key>::KeyAt(i);
    // Skip empty and deleted slots.
    if (HashTable<Shape, Key>::IsKey(k)) {
      PrintF(out, " ");
      if (k->IsString()) {
        String::cast(k)->StringPrint(out);
      } else {
        k->ShortPrint(out);
      }
      PrintF(out, ": ");
      ValueAt(i)->ShortPrint(out);
      PrintF(out, "\n");
    }
  }
}
#endif
12778
12779
12780template<typename Shape, typename Key>
12781void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) {
12782  int pos = 0;
12783  int capacity = HashTable<Shape, Key>::Capacity();
12784  DisallowHeapAllocation no_gc;
12785  WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
12786  for (int i = 0; i < capacity; i++) {
12787    Object* k =  Dictionary<Shape, Key>::KeyAt(i);
12788    if (Dictionary<Shape, Key>::IsKey(k)) {
12789      elements->set(pos++, ValueAt(i), mode);
12790    }
12791  }
12792  ASSERT(pos == elements->length());
12793}
12794
12795
12796InterceptorInfo* JSObject::GetNamedInterceptor() {
12797  ASSERT(map()->has_named_interceptor());
12798  JSFunction* constructor = JSFunction::cast(map()->constructor());
12799  ASSERT(constructor->shared()->IsApiFunction());
12800  Object* result =
12801      constructor->shared()->get_api_func_data()->named_property_handler();
12802  return InterceptorInfo::cast(result);
12803}
12804
12805
12806InterceptorInfo* JSObject::GetIndexedInterceptor() {
12807  ASSERT(map()->has_indexed_interceptor());
12808  JSFunction* constructor = JSFunction::cast(map()->constructor());
12809  ASSERT(constructor->shared()->IsApiFunction());
12810  Object* result =
12811      constructor->shared()->get_api_func_data()->indexed_property_handler();
12812  return InterceptorInfo::cast(result);
12813}
12814
12815
12816MaybeObject* JSObject::GetPropertyPostInterceptor(
12817    Object* receiver,
12818    Name* name,
12819    PropertyAttributes* attributes) {
12820  // Check local property in holder, ignore interceptor.
12821  LookupResult result(GetIsolate());
12822  LocalLookupRealNamedProperty(name, &result);
12823  if (result.IsFound()) {
12824    return GetProperty(receiver, &result, name, attributes);
12825  }
12826  // Continue searching via the prototype chain.
12827  Object* pt = GetPrototype();
12828  *attributes = ABSENT;
12829  if (pt->IsNull()) return GetHeap()->undefined_value();
12830  return pt->GetPropertyWithReceiver(receiver, name, attributes);
12831}
12832
12833
12834MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
12835    Object* receiver,
12836    Name* name,
12837    PropertyAttributes* attributes) {
12838  // Check local property in holder, ignore interceptor.
12839  LookupResult result(GetIsolate());
12840  LocalLookupRealNamedProperty(name, &result);
12841  if (result.IsFound()) {
12842    return GetProperty(receiver, &result, name, attributes);
12843  }
12844  return GetHeap()->undefined_value();
12845}
12846
12847
// Loads |name| by first consulting this object's named interceptor.  When
// the interceptor getter declines to supply a value, falls back to the real
// property lookup (GetPropertyPostInterceptor).  Symbols are not routed to
// the API and yield undefined.
MaybeObject* JSObject::GetPropertyWithInterceptor(
    Object* receiver,
    Name* name,
    PropertyAttributes* attributes) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return GetHeap()->undefined_value();

  Isolate* isolate = GetIsolate();
  InterceptorInfo* interceptor = GetNamedInterceptor();
  HandleScope scope(isolate);
  // Handlify before calling into the embedder, which may GC.
  Handle<Object> receiver_handle(receiver, isolate);
  Handle<JSObject> holder_handle(this);
  Handle<String> name_handle(String::cast(name));

  if (!interceptor->getter()->IsUndefined()) {
    v8::NamedPropertyGetter getter =
        v8::ToCData<v8::NamedPropertyGetter>(interceptor->getter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-get", *holder_handle, name));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), receiver, this);
    v8::Handle<v8::Value> result =
        args.Call(getter, v8::Utils::ToLocal(name_handle));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (!result.IsEmpty()) {
      // The embedder supplied a value; report it as a plain data property.
      *attributes = NONE;
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      return *result_internal;
    }
  }

  // The interceptor did not intercept: do the regular lookup.
  MaybeObject* result = holder_handle->GetPropertyPostInterceptor(
      *receiver_handle,
      *name_handle,
      attributes);
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}
12887
12888
12889bool JSObject::HasRealNamedProperty(Isolate* isolate, Name* key) {
12890  // Check access rights if needed.
12891  if (IsAccessCheckNeeded()) {
12892    if (!isolate->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
12893      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
12894      return false;
12895    }
12896  }
12897
12898  LookupResult result(isolate);
12899  LocalLookupRealNamedProperty(key, &result);
12900  return result.IsFound() && !result.IsInterceptor();
12901}
12902
12903
12904bool JSObject::HasRealElementProperty(Isolate* isolate, uint32_t index) {
12905  // Check access rights if needed.
12906  if (IsAccessCheckNeeded()) {
12907    if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
12908      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
12909      return false;
12910    }
12911  }
12912
12913  if (IsJSGlobalProxy()) {
12914    Object* proto = GetPrototype();
12915    if (proto->IsNull()) return false;
12916    ASSERT(proto->IsJSGlobalObject());
12917    return JSObject::cast(proto)->HasRealElementProperty(isolate, index);
12918  }
12919
12920  return GetElementAttributeWithoutInterceptor(this, index, false) != ABSENT;
12921}
12922
12923
12924bool JSObject::HasRealNamedCallbackProperty(Isolate* isolate, Name* key) {
12925  // Check access rights if needed.
12926  if (IsAccessCheckNeeded()) {
12927    if (!isolate->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
12928      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
12929      return false;
12930    }
12931  }
12932
12933  LookupResult result(isolate);
12934  LocalLookupRealNamedProperty(key, &result);
12935  return result.IsPropertyCallbacks();
12936}
12937
12938
12939int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
12940  if (HasFastProperties()) {
12941    Map* map = this->map();
12942    if (filter == NONE) return map->NumberOfOwnDescriptors();
12943    if (filter & DONT_ENUM) {
12944      int result = map->EnumLength();
12945      if (result != Map::kInvalidEnumCache) return result;
12946    }
12947    return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
12948  }
12949  return property_dictionary()->NumberOfElementsFilterAttributes(filter);
12950}
12951
12952
12953void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
12954  Object* temp = get(i);
12955  set(i, get(j));
12956  set(j, temp);
12957  if (this != numbers) {
12958    temp = numbers->get(i);
12959    numbers->set(i, Smi::cast(numbers->get(j)));
12960    numbers->set(j, Smi::cast(temp));
12961  }
12962}
12963
12964
12965static void InsertionSortPairs(FixedArray* content,
12966                               FixedArray* numbers,
12967                               int len) {
12968  for (int i = 1; i < len; i++) {
12969    int j = i;
12970    while (j > 0 &&
12971           (NumberToUint32(numbers->get(j - 1)) >
12972            NumberToUint32(numbers->get(j)))) {
12973      content->SwapPairs(numbers, j - 1, j);
12974      j--;
12975    }
12976  }
12977}
12978
12979
// Sorts the first |len| content/number pairs with an in-place heapsort,
// ordered by the uint32 value of the numbers.  O(n log n), not stable.
void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
  // In-place heap sort.
  ASSERT(content->length() == numbers->length());

  // Bottom-up max-heap construction.
  for (int i = 1; i < len; ++i) {
    int child_index = i;
    // Sift element i up towards the root until parent >= child holds.
    while (child_index > 0) {
      int parent_index = ((child_index + 1) >> 1) - 1;
      uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
      uint32_t child_value = NumberToUint32(numbers->get(child_index));
      if (parent_value < child_value) {
        content->SwapPairs(numbers, parent_index, child_index);
      } else {
        break;
      }
      child_index = parent_index;
    }
  }

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    content->SwapPairs(numbers, 0, i);
    // Sift down the new top element.
    int parent_index = 0;
    while (true) {
      int child_index = ((parent_index + 1) << 1) - 1;
      if (child_index >= i) break;
      uint32_t child1_value = NumberToUint32(numbers->get(child_index));
      // NOTE(review): child2_value is read before the child_index + 1 >= i
      // test below.  The read stays inside the array as long as
      // len <= numbers->length() (then child_index + 1 <= i <= len - 1), but
      // it may touch an element of the already-sorted tail; its value is
      // unused in that case.  Confirm len <= length at all call sites.
      uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
      uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
      // Swap the parent with its larger child (child1 when child2 is out of
      // the heap region), or stop once the heap property is restored.
      if (child_index + 1 >= i || child1_value > child2_value) {
        if (parent_value > child1_value) break;
        content->SwapPairs(numbers, parent_index, child_index);
        parent_index = child_index;
      } else {
        if (parent_value > child2_value) break;
        content->SwapPairs(numbers, parent_index, child_index + 1);
        parent_index = child_index + 1;
      }
    }
  }
}
13024
13025
13026// Sort this array and the numbers as pairs wrt. the (distinct) numbers.
13027void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13028  ASSERT(this->length() == numbers->length());
13029  // For small arrays, simply use insertion sort.
13030  if (len <= 10) {
13031    InsertionSortPairs(this, numbers, len);
13032    return;
13033  }
13034  // Check the range of indices.
13035  uint32_t min_index = NumberToUint32(numbers->get(0));
13036  uint32_t max_index = min_index;
13037  uint32_t i;
13038  for (i = 1; i < len; i++) {
13039    if (NumberToUint32(numbers->get(i)) < min_index) {
13040      min_index = NumberToUint32(numbers->get(i));
13041    } else if (NumberToUint32(numbers->get(i)) > max_index) {
13042      max_index = NumberToUint32(numbers->get(i));
13043    }
13044  }
13045  if (max_index - min_index + 1 == len) {
13046    // Indices form a contiguous range, unless there are duplicates.
13047    // Do an in-place linear time sort assuming distinct numbers, but
13048    // avoid hanging in case they are not.
13049    for (i = 0; i < len; i++) {
13050      uint32_t p;
13051      uint32_t j = 0;
13052      // While the current element at i is not at its correct position p,
13053      // swap the elements at these two positions.
13054      while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13055             j++ < len) {
13056        SwapPairs(numbers, i, p);
13057      }
13058    }
13059  } else {
13060    HeapSortPairs(this, numbers, len);
13061    return;
13062  }
13063}
13064
13065
// Fill in the names of local properties into the supplied storage. The main
// purpose of this function is to provide reflection information for the object
// mirrors.  Writing starts at |index|; properties whose attributes match
// |filter| are skipped.
void JSObject::GetLocalPropertyNames(
    FixedArray* storage, int index, PropertyAttributes filter) {
  ASSERT(storage->length() >= (NumberOfLocalProperties(filter) - index));
  if (HasFastProperties()) {
    // Fast-properties mode: walk the own descriptors and copy each key that
    // passes the filter.  With SYMBOLIC set in the filter, symbol keys are
    // excluded as well.
    int real_size = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < real_size; i++) {
      if ((descs->GetDetails(i).attributes() & filter) == 0 &&
          ((filter & SYMBOLIC) == 0 || !descs->GetKey(i)->IsSymbol())) {
        storage->set(index++, descs->GetKey(i));
      }
    }
  } else {
    // Dictionary mode: delegate to the property dictionary, which writes
    // the matching keys into storage starting at |index|.
    property_dictionary()->CopyKeysTo(storage,
                                      index,
                                      filter,
                                      NameDictionary::UNSORTED);
  }
}
13088
13089
// Returns the number of own elements passing |filter|.  Passing NULL storage
// makes GetLocalElementKeys count keys without writing them anywhere.
int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
  return GetLocalElementKeys(NULL, filter);
}
13093
13094
// Returns the number of own, enumerable (non-DONT_ENUM) elements.
int JSObject::NumberOfEnumElements() {
  // Fast case for objects with no elements.
  if (!IsJSValue() && HasFastObjectElements()) {
    uint32_t length = IsJSArray() ?
        static_cast<uint32_t>(
            Smi::cast(JSArray::cast(this)->length())->value()) :
        static_cast<uint32_t>(FixedArray::cast(elements())->length());
    if (length == 0) return 0;
  }
  // Compute the number of enumerable elements.
  return NumberOfLocalElements(static_cast<PropertyAttributes>(DONT_ENUM));
}
13107
13108
// Writes the element keys (as Smis) of this object into |storage| and
// returns their count.  When |storage| is NULL, only counts.  |filter| is
// honored only for dictionary-backed elements; fast and external elements
// ignore it.  For JSValue string wrappers, one key per character of the
// wrapped string is appended at the end.
int JSObject::GetLocalElementKeys(FixedArray* storage,
                                  PropertyAttributes filter) {
  int counter = 0;
  switch (GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS: {
      // Fast object/smi elements: every non-hole index is a key.
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedArray::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      // Fast double elements: same as above but holes are tracked by the
      // double array's own representation.
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedDoubleArray::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case EXTERNAL_PIXEL_ELEMENTS: {
      // External arrays are dense: indices 0..length-1 are all present.
      int length = ExternalPixelArray::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS: {
      // Same dense treatment for all the remaining external array kinds.
      int length = ExternalArray::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case DICTIONARY_ELEMENTS: {
      // Dictionary elements respect |filter| and can emit keys sorted.
      if (storage != NULL) {
        element_dictionary()->CopyKeysTo(storage,
                                         filter,
                                         SeededNumberDictionary::SORTED);
      }
      counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
      break;
    }
    case NON_STRICT_ARGUMENTS_ELEMENTS: {
      // The parameter map stores the backing store at slot 1 and the mapped
      // parameter entries starting at slot 2.
      FixedArray* parameter_map = FixedArray::cast(elements());
      int mapped_length = parameter_map->length() - 2;
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      if (arguments->IsDictionary()) {
        // Copy the keys from arguments first, because Dictionary::CopyKeysTo
        // will insert in storage starting at index 0.
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(arguments);
        if (storage != NULL) {
          dictionary->CopyKeysTo(
              storage, filter, SeededNumberDictionary::UNSORTED);
        }
        counter += dictionary->NumberOfElementsFilterAttributes(filter);
        for (int i = 0; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        // Dictionary keys and mapped keys were appended independently, so
        // sort the combined result into index order.
        if (storage != NULL) storage->SortPairs(storage, counter);

      } else {
        // Non-dictionary backing store: merge mapped entries with the
        // backing-store entries, then append any trailing backing entries.
        int backing_length = arguments->length();
        int i = 0;
        for (; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        for (; i < backing_length; ++i) {
          if (storage != NULL) storage->set(counter, Smi::FromInt(i));
          ++counter;
        }
      }
      break;
    }
  }

  // String wrappers expose one element per character of the wrapped string.
  if (this->IsJSValue()) {
    Object* val = JSValue::cast(this)->value();
    if (val->IsString()) {
      String* str = String::cast(val);
      if (storage) {
        for (int i = 0; i < str->length(); i++) {
          storage->set(counter + i, Smi::FromInt(i));
        }
      }
      counter += str->length();
    }
  }
  ASSERT(!storage || storage->length() == counter);
  return counter;
}
13243
13244
// Writes the enumerable element keys into |storage| and returns the count.
int JSObject::GetEnumElementKeys(FixedArray* storage) {
  return GetLocalElementKeys(storage,
                             static_cast<PropertyAttributes>(DONT_ENUM));
}
13249
13250
13251// StringKey simply carries a string object as key.
13252class StringKey : public HashTableKey {
13253 public:
13254  explicit StringKey(String* string) :
13255      string_(string),
13256      hash_(HashForObject(string)) { }
13257
13258  bool IsMatch(Object* string) {
13259    // We know that all entries in a hash table had their hash keys created.
13260    // Use that knowledge to have fast failure.
13261    if (hash_ != HashForObject(string)) {
13262      return false;
13263    }
13264    return string_->Equals(String::cast(string));
13265  }
13266
13267  uint32_t Hash() { return hash_; }
13268
13269  uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }
13270
13271  Object* AsObject(Heap* heap) { return string_; }
13272
13273  String* string_;
13274  uint32_t hash_;
13275};
13276
13277
// StringSharedKeys are used as keys in the eval cache.  A stored entry is a
// 4-element FixedArray: [0] = SharedFunctionInfo of the calling function,
// [1] = source string, [2] = language mode (Smi), [3] = scope position (Smi)
// — see AsObject below.
class StringSharedKey : public HashTableKey {
 public:
  StringSharedKey(String* source,
                  SharedFunctionInfo* shared,
                  LanguageMode language_mode,
                  int scope_position)
      : source_(source),
        shared_(shared),
        language_mode_(language_mode),
        scope_position_(scope_position) { }

  // Compares this key against a stored entry, cheapest fields first; the
  // source-string comparison is done last.
  bool IsMatch(Object* other) {
    if (!other->IsFixedArray()) return false;
    FixedArray* other_array = FixedArray::cast(other);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    if (shared != shared_) return false;
    int language_unchecked = Smi::cast(other_array->get(2))->value();
    ASSERT(language_unchecked == CLASSIC_MODE ||
           language_unchecked == STRICT_MODE ||
           language_unchecked == EXTENDED_MODE);
    LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
    if (language_mode != language_mode_) return false;
    int scope_position = Smi::cast(other_array->get(3))->value();
    if (scope_position != scope_position_) return false;
    String* source = String::cast(other_array->get(1));
    return source->Equals(source_);
  }

  static uint32_t StringSharedHashHelper(String* source,
                                         SharedFunctionInfo* shared,
                                         LanguageMode language_mode,
                                         int scope_position) {
    uint32_t hash = source->Hash();
    if (shared->HasSourceCode()) {
      // Instead of using the SharedFunctionInfo pointer in the hash
      // code computation, we use a combination of the hash of the
      // script source code and the start position of the calling scope.
      // We do this to ensure that the cache entries can survive garbage
      // collection.
      Script* script = Script::cast(shared->script());
      hash ^= String::cast(script->source())->Hash();
      if (language_mode == STRICT_MODE) hash ^= 0x8000;
      if (language_mode == EXTENDED_MODE) hash ^= 0x0080;
      hash += scope_position;
    }
    return hash;
  }

  uint32_t Hash() {
    return StringSharedHashHelper(
        source_, shared_, language_mode_, scope_position_);
  }

  // Recomputes the hash of a stored 4-element entry from its components.
  uint32_t HashForObject(Object* obj) {
    FixedArray* other_array = FixedArray::cast(obj);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    String* source = String::cast(other_array->get(1));
    int language_unchecked = Smi::cast(other_array->get(2))->value();
    ASSERT(language_unchecked == CLASSIC_MODE ||
           language_unchecked == STRICT_MODE ||
           language_unchecked == EXTENDED_MODE);
    LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
    int scope_position = Smi::cast(other_array->get(3))->value();
    return StringSharedHashHelper(
        source, shared, language_mode, scope_position);
  }

  // Materializes this key as the 4-element FixedArray described above.
  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
    Object* obj;
    { MaybeObject* maybe_obj = heap->AllocateFixedArray(4);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* other_array = FixedArray::cast(obj);
    other_array->set(0, shared_);
    other_array->set(1, source_);
    other_array->set(2, Smi::FromInt(language_mode_));
    other_array->set(3, Smi::FromInt(scope_position_));
    return other_array;
  }

 private:
  String* source_;
  SharedFunctionInfo* shared_;
  LanguageMode language_mode_;
  int scope_position_;
};
13365
13366
// RegExpKey carries the source and flags of a regular expression as key.
class RegExpKey : public HashTableKey {
 public:
  RegExpKey(String* string, JSRegExp::Flags flags)
      : string_(string),
        flags_(Smi::FromInt(flags.value())) { }

  // Rather than storing the key in the hash table, a pointer to the
  // stored value is stored where the key should be.  IsMatch then
  // compares the search key to the found object, rather than comparing
  // a key to a key.
  bool IsMatch(Object* obj) {
    FixedArray* val = FixedArray::cast(obj);
    return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
        && (flags_ == val->get(JSRegExp::kFlagsIndex));
  }

  uint32_t Hash() { return RegExpHash(string_, flags_); }

  Object* AsObject(Heap* heap) {
    // Plain hash maps, which is where regexp keys are used, don't
    // use this function.
    UNREACHABLE();
    return NULL;
  }

  // Recomputes the hash of a stored value from its source and flags slots.
  uint32_t HashForObject(Object* obj) {
    FixedArray* val = FixedArray::cast(obj);
    return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
                      Smi::cast(val->get(JSRegExp::kFlagsIndex)));
  }

  // Combines the source hash with the flag bits into a single hash value.
  static uint32_t RegExpHash(String* string, Smi* flags) {
    return string->Hash() + flags->value();
  }

  String* string_;
  Smi* flags_;
};
13406
13407
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  // Computes the hash lazily and caches the full hash field; also records
  // the character count in chars_ for use by AsObject.
  uint32_t Hash() {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  // Allocates an internalized string for the UTF-8 data, making sure the
  // hash field has been computed first.
  MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    return heap->AllocateInternalizedStringFromUtf8(string_,
                                                    chars_,
                                                    hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
13442
13443
// Common base for keys over a flat sequence of characters (one- or
// two-byte); subclasses supply IsMatch and AsObject for their char type.
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  // Computes and caches the full hash field for the character sequence.
  uint32_t Hash() {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};
13469
13470
13471
// Key over a one-byte (Latin-1) character sequence.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  // Allocates a one-byte internalized string, computing the hash first if
  // it has not been computed yet.
  MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    return heap->AllocateOneByteInternalizedString(string_, hash_field_);
  }
};
13486
13487
13488class SubStringOneByteStringKey : public HashTableKey {
13489 public:
13490  explicit SubStringOneByteStringKey(Handle<SeqOneByteString> string,
13491                                     int from,
13492                                     int length)
13493      : string_(string), from_(from), length_(length) { }
13494
13495  uint32_t Hash() {
13496    ASSERT(length_ >= 0);
13497    ASSERT(from_ + length_ <= string_->length());
13498    uint8_t* chars = string_->GetChars() + from_;
13499    hash_field_ = StringHasher::HashSequentialString(
13500        chars, length_, string_->GetHeap()->HashSeed());
13501    uint32_t result = hash_field_ >> String::kHashShift;
13502    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
13503    return result;
13504  }
13505
13506
13507  uint32_t HashForObject(Object* other) {
13508    return String::cast(other)->Hash();
13509  }
13510
13511  bool IsMatch(Object* string) {
13512    Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
13513    return String::cast(string)->IsOneByteEqualTo(chars);
13514  }
13515
13516  MaybeObject* AsObject(Heap* heap) {
13517    if (hash_field_ == 0) Hash();
13518    Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
13519    return heap->AllocateOneByteInternalizedString(chars, hash_field_);
13520  }
13521
13522 private:
13523  Handle<SeqOneByteString> string_;
13524  int from_;
13525  int length_;
13526  uint32_t hash_field_;
13527};
13528
13529
// Key over a two-byte (UTF-16 code unit) character sequence.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  // Allocates a two-byte internalized string, computing the hash first if
  // it has not been computed yet.
  MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    return heap->AllocateTwoByteInternalizedString(string_, hash_field_);
  }
};
13544
13545
// InternalizedStringKey carries a string/internalized-string object as key.
class InternalizedStringKey : public HashTableKey {
 public:
  explicit InternalizedStringKey(String* string)
      : string_(string) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->Equals(string_);
  }

  uint32_t Hash() { return string_->Hash(); }

  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  MaybeObject* AsObject(Heap* heap) {
    // Attempt to flatten the string, so that internalized strings will most
    // often be flat strings.
    string_ = string_->TryFlattenGetString();
    // Internalize the string if possible.
    Map* map = heap->InternalizedStringMapForString(string_);
    if (map != NULL) {
      // In-place internalization: swap in the internalized-string map
      // without copying the character data.
      string_->set_map_no_write_barrier(map);
      ASSERT(string_->IsInternalizedString());
      return string_;
    }
    // Otherwise allocate a new internalized string.
    return heap->AllocateInternalizedStringImpl(
        string_, string_->length(), string_->hash_field());
  }

  static uint32_t StringHash(Object* obj) {
    return String::cast(obj)->Hash();
  }

  String* string_;
};
13584
13585
// Visits the pointer fields that precede the table's element region.
template<typename Shape, typename Key>
void HashTable<Shape, Key>::IteratePrefix(ObjectVisitor* v) {
  IteratePointers(v, 0, kElementsStartOffset);
}
13590
13591
// Visits every pointer in the table's element region, i.e. from the start
// of the elements to the end of the backing store.
template<typename Shape, typename Key>
void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) {
  IteratePointers(v,
                  kElementsStartOffset,
                  kHeaderSize + length() * kPointerSize);
}
13598
13599
13600template<typename Shape, typename Key>
13601MaybeObject* HashTable<Shape, Key>::Allocate(Heap* heap,
13602                                             int at_least_space_for,
13603                                             MinimumCapacity capacity_option,
13604                                             PretenureFlag pretenure) {
13605  ASSERT(!capacity_option || IS_POWER_OF_TWO(at_least_space_for));
13606  int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
13607                     ? at_least_space_for
13608                     : ComputeCapacity(at_least_space_for);
13609  if (capacity > HashTable::kMaxCapacity) {
13610    return Failure::OutOfMemoryException(0x10);
13611  }
13612
13613  Object* obj;
13614  { MaybeObject* maybe_obj =
13615        heap-> AllocateHashTable(EntryToIndex(capacity), pretenure);
13616    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
13617  }
13618  HashTable::cast(obj)->SetNumberOfElements(0);
13619  HashTable::cast(obj)->SetNumberOfDeletedElements(0);
13620  HashTable::cast(obj)->SetCapacity(capacity);
13621  return obj;
13622}
13623
13624
// Find entry for key otherwise return kNotFound.
int NameDictionary::FindEntry(Name* key) {
  if (!key->IsUniqueName()) {
    // Non-unique names take the generic probing path.
    return HashTable<NameDictionaryShape, Name*>::FindEntry(key);
  }

  // Optimized for unique names. Knowledge of the key type allows:
  // 1. Move the check if the key is unique out of the loop.
  // 2. Avoid comparing hash codes in unique-to-unique comparison.
  // 3. Detect a case when a dictionary key is not unique but the key is.
  //    In case of positive result the dictionary key may be replaced by the
  //    internalized string with minimal performance penalty. It gives a chance
  //    to perform further lookups in code stubs (and significant performance
  //    boost a certain style of code).

  // EnsureCapacity will guarantee the hash table is never full.
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(key->Hash(), capacity);
  uint32_t count = 1;

  while (true) {
    int index = EntryToIndex(entry);
    Object* element = get(index);
    if (element->IsUndefined()) break;  // Empty entry.
    // Unique names are pointer-comparable, so identity means a match.
    if (key == element) return entry;
    if (!element->IsUniqueName() &&
        !element->IsTheHole() &&
        Name::cast(element)->Equals(key)) {
      // Replace a key that is a non-internalized string by the equivalent
      // internalized string for faster further lookups.
      set(index, key);
      return entry;
    }
    ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(key));
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
13663
13664
// Copies this table's prefix and all live entries into |new_table|,
// recomputing each entry's bucket from its hash.  |key| is only used to
// dispatch the shape-specific HashForObject.  Returns |new_table|.
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
  ASSERT(NumberOfElements() < new_table->Capacity());

  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);

  // Copy prefix to new array.
  for (int i = kPrefixStartIndex;
       i < kPrefixStartIndex + Shape::kPrefixSize;
       i++) {
    new_table->set(i, get(i), mode);
  }

  // Rehash the elements.
  int capacity = Capacity();
  for (int i = 0; i < capacity; i++) {
    uint32_t from_index = EntryToIndex(i);
    Object* k = get(from_index);
    if (IsKey(k)) {
      uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
      uint32_t insertion_index =
          EntryToIndex(new_table->FindInsertionEntry(hash));
      // Copy the whole entry (key plus any associated value slots).
      for (int j = 0; j < Shape::kEntrySize; j++) {
        new_table->set(insertion_index + j, get(from_index + j), mode);
      }
    }
  }
  // Deleted entries (holes) were dropped during the copy.
  new_table->SetNumberOfElements(NumberOfElements());
  new_table->SetNumberOfDeletedElements(0);
  return new_table;
}
13697
13698
// Ensures there is room for |n| additional elements, growing (and rehashing
// into a table of roughly double the live-element count) when necessary.
// Returns |this| unchanged when the current table is roomy enough.
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) {
  int capacity = Capacity();
  int nof = NumberOfElements() + n;
  int nod = NumberOfDeletedElements();
  // Return if:
  //   50% is still free after adding n elements and
  //   at most 50% of the free elements are deleted elements.
  if (nod <= (capacity - nof) >> 1) {
    int needed_free = nof >> 1;
    if (nof + needed_free <= capacity) return this;
  }

  // Large tables that already escaped the nursery are allocated tenured to
  // avoid repeated promotion work.
  const int kMinCapacityForPretenure = 256;
  bool pretenure =
      (capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this);
  Object* obj;
  { MaybeObject* maybe_obj =
        Allocate(GetHeap(),
                 nof * 2,
                 USE_DEFAULT_MINIMUM_CAPACITY,
                 pretenure ? TENURED : NOT_TENURED);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  return Rehash(HashTable::cast(obj), key);
}
13726
13727
// Shrinks the table when it is at most a quarter full, rehashing the live
// entries into a smaller table.  Returns |this| unchanged when shrinking is
// not worthwhile (more than 25% full, or fewer than 16 elements).
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Shrink(Key key) {
  int capacity = Capacity();
  int nof = NumberOfElements();

  // Shrink to fit the number of elements if only a quarter of the
  // capacity is filled with elements.
  if (nof > (capacity >> 2)) return this;
  // Allocate a new dictionary with room for at least the current
  // number of elements. The allocation method will make sure that
  // there is extra room in the dictionary for additions. Don't go
  // lower than room for 16 elements.
  int at_least_room_for = nof;
  if (at_least_room_for < 16) return this;

  // Keep sizeable, already-tenured tables in old space.
  const int kMinCapacityForPretenure = 256;
  bool pretenure =
      (at_least_room_for > kMinCapacityForPretenure) &&
      !GetHeap()->InNewSpace(this);
  Object* obj;
  { MaybeObject* maybe_obj =
        Allocate(GetHeap(),
                 at_least_room_for,
                 USE_DEFAULT_MINIMUM_CAPACITY,
                 pretenure ? TENURED : NOT_TENURED);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  return Rehash(HashTable::cast(obj), key);
}
13758
13759
13760template<typename Shape, typename Key>
13761uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) {
13762  uint32_t capacity = Capacity();
13763  uint32_t entry = FirstProbe(hash, capacity);
13764  uint32_t count = 1;
13765  // EnsureCapacity will guarantee the hash table is never full.
13766  while (true) {
13767    Object* element = KeyAt(entry);
13768    if (element->IsUndefined() || element->IsTheHole()) break;
13769    entry = NextProbe(entry, count++, capacity);
13770  }
13771  return entry;
13772}
13773
13774
// Force instantiation of template instances class.
// Please note this list is compiler dependent.
// The HashTable/Dictionary template member definitions live in this
// translation unit, so every specialization used elsewhere is explicitly
// instantiated here to make the compiler emit its code for the linker.

template class HashTable<StringTableShape, HashTableKey*>;

template class HashTable<CompilationCacheShape, HashTableKey*>;

template class HashTable<MapCacheShape, HashTableKey*>;

template class HashTable<ObjectHashTableShape<1>, Object*>;

template class HashTable<ObjectHashTableShape<2>, Object*>;

template class Dictionary<NameDictionaryShape, Name*>;

template class Dictionary<SeededNumberDictionaryShape, uint32_t>;

template class Dictionary<UnseededNumberDictionaryShape, uint32_t>;

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
    Allocate(Heap* heap, int n, PretenureFlag pretenure);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::AtPut(
    uint32_t, Object*);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    AtPut(uint32_t, Object*);

template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object* Dictionary<NameDictionaryShape, Name*>::SlowReverseLookup(
    Object*);

template void Dictionary<SeededNumberDictionaryShape, uint32_t>::CopyKeysTo(
    FixedArray*,
    PropertyAttributes,
    Dictionary<SeededNumberDictionaryShape, uint32_t>::SortMode);

template Object* Dictionary<NameDictionaryShape, Name*>::DeleteProperty(
    int, JSObject::DeleteMode);

template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    DeleteProperty(int, JSObject::DeleteMode);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Shrink(Name* n);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Shrink(
    uint32_t);

template void Dictionary<NameDictionaryShape, Name*>::CopyKeysTo(
    FixedArray*,
    int,
    PropertyAttributes,
    Dictionary<NameDictionaryShape, Name*>::SortMode);

template int
Dictionary<NameDictionaryShape, Name*>::NumberOfElementsFilterAttributes(
    PropertyAttributes);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Add(
    Name*, Object*, PropertyDetails);

template MaybeObject*
Dictionary<NameDictionaryShape, Name*>::GenerateNewEnumerationIndices();

template int
Dictionary<SeededNumberDictionaryShape, uint32_t>::
    NumberOfElementsFilterAttributes(PropertyAttributes);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Add(
    uint32_t, Object*, PropertyDetails);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::Add(
    uint32_t, Object*, PropertyDetails);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(int, uint32_t);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(int, uint32_t);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
    EnsureCapacity(int, Name*);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::AddEntry(
    Name*, Object*, PropertyDetails, uint32_t);

template
int Dictionary<SeededNumberDictionaryShape, uint32_t>::NumberOfEnumElements();

template
int Dictionary<NameDictionaryShape, Name*>::NumberOfEnumElements();

template
int HashTable<SeededNumberDictionaryShape, uint32_t>::FindEntry(uint32_t);
13886
13887
// Collates undefined and unexisting elements below limit from position
// zero of the elements. The object stays in Dictionary mode.
// Returns the number of non-undefined, defined elements below |limit| as a
// Smi (or a pre-allocated HeapNumber if it does not fit in a Smi), or
// Smi::FromInt(-1) to signal that the caller must fall back to sorting in JS.
MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
  ASSERT(HasDictionaryElements());
  // Must stay in dictionary mode, either because of requires_slow_elements,
  // or because we are not going to sort (and therefore compact) all of the
  // elements.
  SeededNumberDictionary* dict = element_dictionary();
  HeapNumber* result_double = NULL;
  if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
    // Allocate space for result before we start mutating the object.
    Object* new_double;
    { MaybeObject* maybe_new_double = GetHeap()->AllocateHeapNumber(0.0);
      if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
    }
    result_double = HeapNumber::cast(new_double);
  }

  // Allocate the replacement dictionary up-front so that no further
  // allocation is needed while entries are copied over below.
  Object* obj;
  { MaybeObject* maybe_obj =
        SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements());
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj);

  DisallowHeapAllocation no_alloc;

  uint32_t pos = 0;     // Next index for a defined, non-undefined element.
  uint32_t undefs = 0;  // Count of undefined elements below |limit|.
  int capacity = dict->Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = dict->KeyAt(i);
    if (dict->IsKey(k)) {
      ASSERT(k->IsNumber());
      ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
      ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
      ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
      Object* value = dict->ValueAt(i);
      PropertyDetails details = dict->DetailsAt(i);
      if (details.type() == CALLBACKS || details.IsReadOnly()) {
        // Bail out and do the sorting of undefineds and array holes in JS.
        // Also bail out if the element is not supposed to be moved.
        return Smi::FromInt(-1);
      }
      uint32_t key = NumberToUint32(k);
      // In the following we assert that adding the entry to the new dictionary
      // does not cause GC.  This is the case because we made sure to allocate
      // the dictionary big enough above, so it need not grow.
      if (key < limit) {
        if (value->IsUndefined()) {
          undefs++;
        } else {
          if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
            // Adding an entry with the key beyond smi-range requires
            // allocation. Bailout.
            return Smi::FromInt(-1);
          }
          new_dict->AddNumberEntry(pos, value, details)->ToObjectUnchecked();
          pos++;
        }
      } else {
        // Elements at or above |limit| keep their original keys.
        if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
          // Adding an entry with the key beyond smi-range requires
          // allocation. Bailout.
          return Smi::FromInt(-1);
        }
        new_dict->AddNumberEntry(key, value, details)->ToObjectUnchecked();
      }
    }
  }

  // Append the collected undefineds immediately after the defined values.
  uint32_t result = pos;
  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
  Heap* heap = GetHeap();
  while (undefs > 0) {
    if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return Smi::FromInt(-1);
    }
    new_dict->AddNumberEntry(pos, heap->undefined_value(), no_details)->
        ToObjectUnchecked();
    pos++;
    undefs--;
  }

  set_elements(new_dict);

  if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
    return Smi::FromInt(static_cast<int>(result));
  }

  // Result exceeds Smi range; use the HeapNumber allocated up-front.
  ASSERT_NE(NULL, result_double);
  result_double->set_value(static_cast<double>(result));
  return result_double;
}
13984
13985
// Collects all defined (non-hole) and non-undefined (array) elements at
// the start of the elements array.
// If the object is in dictionary mode, it is converted to fast elements
// mode.
// Returns the number of defined, non-undefined elements (as a Smi, or as a
// pre-allocated HeapNumber when beyond Smi range).
MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
  Heap* heap = GetHeap();

  ASSERT(!map()->is_observed());
  if (HasDictionaryElements()) {
    // Convert to fast elements containing only the existing properties.
    // Ordering is irrelevant, since we are going to sort anyway.
    SeededNumberDictionary* dict = element_dictionary();
    if (IsJSArray() || dict->requires_slow_elements() ||
        dict->max_number_key() >= limit) {
      return PrepareSlowElementsForSort(limit);
    }
    // Convert to fast elements.

    Object* obj;
    MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
                                                      FAST_HOLEY_ELEMENTS);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    Map* new_map = Map::cast(obj);

    PretenureFlag tenure = heap->InNewSpace(this) ? NOT_TENURED: TENURED;
    Object* new_array;
    { MaybeObject* maybe_new_array =
          heap->AllocateFixedArray(dict->NumberOfElements(), tenure);
      if (!maybe_new_array->ToObject(&new_array)) return maybe_new_array;
    }
    FixedArray* fast_elements = FixedArray::cast(new_array);
    dict->CopyValuesTo(fast_elements);
    ValidateElements();

    set_map_and_elements(new_map, fast_elements);
  } else if (HasExternalArrayElements()) {
    // External arrays cannot have holes or undefined elements.
    return Smi::FromInt(ExternalArray::cast(elements())->length());
  } else if (!HasFastDoubleElements()) {
    // Elements may be shared (copy-on-write); make them writable first.
    Object* obj;
    { MaybeObject* maybe_obj = EnsureWritableFastElements();
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
  }
  ASSERT(HasFastSmiOrObjectElements() || HasFastDoubleElements());

  // Collect holes at the end, undefined before that and the rest at the
  // start, and return the number of non-hole, non-undefined values.

  FixedArrayBase* elements_base = FixedArrayBase::cast(this->elements());
  uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
  if (limit > elements_length) {
    limit = elements_length;
  }
  if (limit == 0) {
    return Smi::FromInt(0);
  }

  HeapNumber* result_double = NULL;
  if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
    // Pessimistically allocate space for return value before
    // we start mutating the array.
    Object* new_double;
    { MaybeObject* maybe_new_double = heap->AllocateHeapNumber(0.0);
      if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
    }
    result_double = HeapNumber::cast(new_double);
  }

  uint32_t result = 0;
  if (elements_base->map() == heap->fixed_double_array_map()) {
    FixedDoubleArray* elements = FixedDoubleArray::cast(elements_base);
    // Split elements into defined and the_hole, in that order.
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < holes; i++) {
      if (elements->is_the_hole(i)) {
        holes--;
      } else {
        continue;
      }
      // Position i needs to be filled.
      while (holes > i) {
        if (elements->is_the_hole(holes)) {
          holes--;
        } else {
          elements->set(i, elements->get_scalar(holes));
          break;
        }
      }
    }
    result = holes;
    // Fill the tail [holes, limit) with the hole.
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  } else {
    FixedArray* elements = FixedArray::cast(elements_base);
    DisallowHeapAllocation no_gc;

    // Split elements into defined, undefined and the_hole, in that order.  Only
    // count locations for undefined and the hole, and fill them afterwards.
    WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
    unsigned int undefs = limit;
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < undefs; i++) {
      Object* current = elements->get(i);
      if (current->IsTheHole()) {
        holes--;
        undefs--;
      } else if (current->IsUndefined()) {
        undefs--;
      } else {
        continue;
      }
      // Position i needs to be filled.
      while (undefs > i) {
        current = elements->get(undefs);
        if (current->IsTheHole()) {
          holes--;
          undefs--;
        } else if (current->IsUndefined()) {
          undefs--;
        } else {
          elements->set(i, current, write_barrier);
          break;
        }
      }
    }
    result = undefs;
    // [undefs, holes) gets undefined, [holes, limit) gets the hole.
    while (undefs < holes) {
      elements->set_undefined(undefs);
      undefs++;
    }
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  }

  if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
    return Smi::FromInt(static_cast<int>(result));
  }
  ASSERT_NE(NULL, result_double);
  result_double->set_value(static_cast<double>(result));
  return result_double;
}
14136
14137
14138ExternalArrayType JSTypedArray::type() {
14139  switch (elements()->map()->instance_type()) {
14140    case EXTERNAL_BYTE_ARRAY_TYPE:
14141      return kExternalByteArray;
14142    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
14143      return kExternalUnsignedByteArray;
14144    case EXTERNAL_SHORT_ARRAY_TYPE:
14145      return kExternalShortArray;
14146    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
14147      return kExternalUnsignedShortArray;
14148    case EXTERNAL_INT_ARRAY_TYPE:
14149      return kExternalIntArray;
14150    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
14151      return kExternalUnsignedIntArray;
14152    case EXTERNAL_FLOAT_ARRAY_TYPE:
14153      return kExternalFloatArray;
14154    case EXTERNAL_DOUBLE_ARRAY_TYPE:
14155      return kExternalDoubleArray;
14156    case EXTERNAL_PIXEL_ARRAY_TYPE:
14157      return kExternalPixelArray;
14158    default:
14159      return static_cast<ExternalArrayType>(-1);
14160  }
14161}
14162
14163
14164size_t JSTypedArray::element_size() {
14165  switch (elements()->map()->instance_type()) {
14166    case EXTERNAL_BYTE_ARRAY_TYPE:
14167      return 1;
14168    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
14169      return 1;
14170    case EXTERNAL_SHORT_ARRAY_TYPE:
14171      return 2;
14172    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
14173      return 2;
14174    case EXTERNAL_INT_ARRAY_TYPE:
14175      return 4;
14176    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
14177      return 4;
14178    case EXTERNAL_FLOAT_ARRAY_TYPE:
14179      return 4;
14180    case EXTERNAL_DOUBLE_ARRAY_TYPE:
14181      return 8;
14182    case EXTERNAL_PIXEL_ARRAY_TYPE:
14183      return 1;
14184    default:
14185      UNREACHABLE();
14186      return 0;
14187  }
14188}
14189
14190
14191Object* ExternalPixelArray::SetValue(uint32_t index, Object* value) {
14192  uint8_t clamped_value = 0;
14193  if (index < static_cast<uint32_t>(length())) {
14194    if (value->IsSmi()) {
14195      int int_value = Smi::cast(value)->value();
14196      if (int_value < 0) {
14197        clamped_value = 0;
14198      } else if (int_value > 255) {
14199        clamped_value = 255;
14200      } else {
14201        clamped_value = static_cast<uint8_t>(int_value);
14202      }
14203    } else if (value->IsHeapNumber()) {
14204      double double_value = HeapNumber::cast(value)->value();
14205      if (!(double_value > 0)) {
14206        // NaN and less than zero clamp to zero.
14207        clamped_value = 0;
14208      } else if (double_value > 255) {
14209        // Greater than 255 clamp to 255.
14210        clamped_value = 255;
14211      } else {
14212        // Other doubles are rounded to the nearest integer.
14213        clamped_value = static_cast<uint8_t>(lrint(double_value));
14214      }
14215    } else {
14216      // Clamp undefined to zero (default). All other types have been
14217      // converted to a number type further up in the call chain.
14218      ASSERT(value->IsUndefined());
14219    }
14220    set(index, clamped_value);
14221  }
14222  return Smi::FromInt(clamped_value);
14223}
14224
14225
14226template<typename ExternalArrayClass, typename ValueType>
14227static MaybeObject* ExternalArrayIntSetter(Heap* heap,
14228                                           ExternalArrayClass* receiver,
14229                                           uint32_t index,
14230                                           Object* value) {
14231  ValueType cast_value = 0;
14232  if (index < static_cast<uint32_t>(receiver->length())) {
14233    if (value->IsSmi()) {
14234      int int_value = Smi::cast(value)->value();
14235      cast_value = static_cast<ValueType>(int_value);
14236    } else if (value->IsHeapNumber()) {
14237      double double_value = HeapNumber::cast(value)->value();
14238      cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
14239    } else {
14240      // Clamp undefined to zero (default). All other types have been
14241      // converted to a number type further up in the call chain.
14242      ASSERT(value->IsUndefined());
14243    }
14244    receiver->set(index, cast_value);
14245  }
14246  return heap->NumberFromInt32(cast_value);
14247}
14248
14249
14250MaybeObject* ExternalByteArray::SetValue(uint32_t index, Object* value) {
14251  return ExternalArrayIntSetter<ExternalByteArray, int8_t>
14252      (GetHeap(), this, index, value);
14253}
14254
14255
14256MaybeObject* ExternalUnsignedByteArray::SetValue(uint32_t index,
14257                                                 Object* value) {
14258  return ExternalArrayIntSetter<ExternalUnsignedByteArray, uint8_t>
14259      (GetHeap(), this, index, value);
14260}
14261
14262
14263MaybeObject* ExternalShortArray::SetValue(uint32_t index,
14264                                          Object* value) {
14265  return ExternalArrayIntSetter<ExternalShortArray, int16_t>
14266      (GetHeap(), this, index, value);
14267}
14268
14269
14270MaybeObject* ExternalUnsignedShortArray::SetValue(uint32_t index,
14271                                                  Object* value) {
14272  return ExternalArrayIntSetter<ExternalUnsignedShortArray, uint16_t>
14273      (GetHeap(), this, index, value);
14274}
14275
14276
14277MaybeObject* ExternalIntArray::SetValue(uint32_t index, Object* value) {
14278  return ExternalArrayIntSetter<ExternalIntArray, int32_t>
14279      (GetHeap(), this, index, value);
14280}
14281
14282
14283MaybeObject* ExternalUnsignedIntArray::SetValue(uint32_t index, Object* value) {
14284  uint32_t cast_value = 0;
14285  Heap* heap = GetHeap();
14286  if (index < static_cast<uint32_t>(length())) {
14287    if (value->IsSmi()) {
14288      int int_value = Smi::cast(value)->value();
14289      cast_value = static_cast<uint32_t>(int_value);
14290    } else if (value->IsHeapNumber()) {
14291      double double_value = HeapNumber::cast(value)->value();
14292      cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
14293    } else {
14294      // Clamp undefined to zero (default). All other types have been
14295      // converted to a number type further up in the call chain.
14296      ASSERT(value->IsUndefined());
14297    }
14298    set(index, cast_value);
14299  }
14300  return heap->NumberFromUint32(cast_value);
14301}
14302
14303
14304MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
14305  float cast_value = static_cast<float>(OS::nan_value());
14306  Heap* heap = GetHeap();
14307  if (index < static_cast<uint32_t>(length())) {
14308    if (value->IsSmi()) {
14309      int int_value = Smi::cast(value)->value();
14310      cast_value = static_cast<float>(int_value);
14311    } else if (value->IsHeapNumber()) {
14312      double double_value = HeapNumber::cast(value)->value();
14313      cast_value = static_cast<float>(double_value);
14314    } else {
14315      // Clamp undefined to NaN (default). All other types have been
14316      // converted to a number type further up in the call chain.
14317      ASSERT(value->IsUndefined());
14318    }
14319    set(index, cast_value);
14320  }
14321  return heap->AllocateHeapNumber(cast_value);
14322}
14323
14324
14325MaybeObject* ExternalDoubleArray::SetValue(uint32_t index, Object* value) {
14326  double double_value = OS::nan_value();
14327  Heap* heap = GetHeap();
14328  if (index < static_cast<uint32_t>(length())) {
14329    if (value->IsSmi()) {
14330      int int_value = Smi::cast(value)->value();
14331      double_value = static_cast<double>(int_value);
14332    } else if (value->IsHeapNumber()) {
14333      double_value = HeapNumber::cast(value)->value();
14334    } else {
14335      // Clamp undefined to NaN (default). All other types have been
14336      // converted to a number type further up in the call chain.
14337      ASSERT(value->IsUndefined());
14338    }
14339    set(index, double_value);
14340  }
14341  return heap->AllocateHeapNumber(double_value);
14342}
14343
14344
14345PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
14346  ASSERT(!HasFastProperties());
14347  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
14348  return PropertyCell::cast(value);
14349}
14350
14351
// TODO(mstarzinger): Temporary wrapper until handlified.
// Adds |name| -> |value| with |details| to |dict|; CALL_HEAP_FUNCTION
// retries on allocation failure and returns the (possibly reallocated)
// dictionary as a handle.
static Handle<NameDictionary> NameDictionaryAdd(Handle<NameDictionary> dict,
                                                Handle<Name> name,
                                                Handle<Object> value,
                                                PropertyDetails details) {
  CALL_HEAP_FUNCTION(dict->GetIsolate(),
                     dict->Add(*name, *value, details),
                     NameDictionary);
}
14361
14362
14363Handle<PropertyCell> GlobalObject::EnsurePropertyCell(
14364    Handle<GlobalObject> global,
14365    Handle<Name> name) {
14366  ASSERT(!global->HasFastProperties());
14367  int entry = global->property_dictionary()->FindEntry(*name);
14368  if (entry == NameDictionary::kNotFound) {
14369    Isolate* isolate = global->GetIsolate();
14370    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
14371        isolate->factory()->the_hole_value());
14372    PropertyDetails details(NONE, NORMAL, 0);
14373    details = details.AsDeleted();
14374    Handle<NameDictionary> dictionary = NameDictionaryAdd(
14375        handle(global->property_dictionary()), name, cell, details);
14376    global->set_properties(*dictionary);
14377    return cell;
14378  } else {
14379    Object* value = global->property_dictionary()->ValueAt(entry);
14380    ASSERT(value->IsPropertyCell());
14381    return handle(PropertyCell::cast(value));
14382  }
14383}
14384
14385
14386MaybeObject* StringTable::LookupString(String* string, Object** s) {
14387  InternalizedStringKey key(string);
14388  return LookupKey(&key, s);
14389}
14390
14391
// This class is used for looking up two character strings in the string table.
// If we don't have a hit we don't want to waste much time so we unroll the
// string hash calculation loop here for speed.  Doesn't work if the two
// characters form a decimal integer, since such strings have a different hash
// algorithm.
class TwoCharHashTableKey : public HashTableKey {
 public:
  // Computes the hash eagerly: this is an unrolled copy of the seeded
  // Jenkins one-at-a-time hash used by StringHasher for two characters.
  TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
    : c1_(c1), c2_(c2) {
    // Char 1.
    uint32_t hash = seed;
    hash += c1;
    hash += hash << 10;
    hash ^= hash >> 6;
    // Char 2.
    hash += c2;
    hash += hash << 10;
    hash ^= hash >> 6;
    // GetHash.
    hash += hash << 3;
    hash ^= hash >> 11;
    hash += hash << 15;
    // A zero hash field is reserved; substitute the canonical non-zero value.
    if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
    hash_ = hash;
#ifdef DEBUG
    // If this assert fails then we failed to reproduce the two-character
    // version of the string hashing algorithm above.  One reason could be
    // that we were passed two digits as characters, since the hash
    // algorithm is different in that case.
    uint16_t chars[2] = {c1, c2};
    uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
    hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
    ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
#endif
  }

  // Matches any string of exactly two characters equal to (c1_, c2_).
  bool IsMatch(Object* o) {
    if (!o->IsString()) return false;
    String* other = String::cast(o);
    if (other->length() != 2) return false;
    if (other->Get(0) != c1_) return false;
    return other->Get(1) == c2_;
  }

  uint32_t Hash() { return hash_; }
  uint32_t HashForObject(Object* key) {
    if (!key->IsString()) return 0;
    return String::cast(key)->Hash();
  }

  Object* AsObject(Heap* heap) {
    // The TwoCharHashTableKey is only used for looking in the string
    // table, not for adding to it.
    UNREACHABLE();
    return NULL;
  }

 private:
  uint16_t c1_;  // First character.
  uint16_t c2_;  // Second character.
  uint32_t hash_;  // Precomputed hash, set in the constructor.
};
14454
14455
14456bool StringTable::LookupStringIfExists(String* string, String** result) {
14457  InternalizedStringKey key(string);
14458  int entry = FindEntry(&key);
14459  if (entry == kNotFound) {
14460    return false;
14461  } else {
14462    *result = String::cast(KeyAt(entry));
14463    ASSERT(StringShape(*result).IsInternalized());
14464    return true;
14465  }
14466}
14467
14468
14469bool StringTable::LookupTwoCharsStringIfExists(uint16_t c1,
14470                                               uint16_t c2,
14471                                               String** result) {
14472  TwoCharHashTableKey key(c1, c2, GetHeap()->HashSeed());
14473  int entry = FindEntry(&key);
14474  if (entry == kNotFound) {
14475    return false;
14476  } else {
14477    *result = String::cast(KeyAt(entry));
14478    ASSERT(StringShape(*result).IsInternalized());
14479    return true;
14480  }
14481}
14482
14483
14484MaybeObject* StringTable::LookupUtf8String(Vector<const char> str,
14485                                           Object** s) {
14486  Utf8StringKey key(str, GetHeap()->HashSeed());
14487  return LookupKey(&key, s);
14488}
14489
14490
14491MaybeObject* StringTable::LookupOneByteString(Vector<const uint8_t> str,
14492                                              Object** s) {
14493  OneByteStringKey key(str, GetHeap()->HashSeed());
14494  return LookupKey(&key, s);
14495}
14496
14497
14498MaybeObject* StringTable::LookupSubStringOneByteString(
14499    Handle<SeqOneByteString> str,
14500    int from,
14501    int length,
14502    Object** s) {
14503  SubStringOneByteStringKey key(str, from, length);
14504  return LookupKey(&key, s);
14505}
14506
14507
14508MaybeObject* StringTable::LookupTwoByteString(Vector<const uc16> str,
14509                                              Object** s) {
14510  TwoByteStringKey key(str, GetHeap()->HashSeed());
14511  return LookupKey(&key, s);
14512}
14513
14514
// Finds |key| in the table, or inserts the string it describes.  On
// success *s holds the canonical string and the (possibly reallocated)
// table is returned, so the caller must re-install it.
MaybeObject* StringTable::LookupKey(HashTableKey* key, Object** s) {
  int entry = FindEntry(key);

  // String already in table.
  if (entry != kNotFound) {
    *s = KeyAt(entry);
    return this;
  }

  // Adding new string. Grow table if needed.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Create string object.
  Object* string;
  { MaybeObject* maybe_string = key->AsObject(GetHeap());
    if (!maybe_string->ToObject(&string)) return maybe_string;
  }

  // If the string table grew as part of EnsureCapacity, obj is not
  // the current string table and therefore we cannot use
  // StringTable::cast here.
  StringTable* table = reinterpret_cast<StringTable*>(obj);

  // Add the new string and return it along with the string table.
  entry = table->FindInsertionEntry(key->Hash());
  table->set(EntryToIndex(entry), string);
  table->ElementAdded();
  *s = string;
  return table;
}
14548
14549
14550// The key for the script compilation cache is dependent on the mode flags,
14551// because they change the global language mode and thus binding behaviour.
14552// If flags change at some point, we must ensure that we do not hit the cache
14553// for code compiled with different settings.
14554static LanguageMode CurrentGlobalLanguageMode() {
14555  return FLAG_use_strict
14556      ? (FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE)
14557      : CLASSIC_MODE;
14558}
14559
14560
14561Object* CompilationCacheTable::Lookup(String* src, Context* context) {
14562  SharedFunctionInfo* shared = context->closure()->shared();
14563  StringSharedKey key(src,
14564                      shared,
14565                      CurrentGlobalLanguageMode(),
14566                      RelocInfo::kNoPosition);
14567  int entry = FindEntry(&key);
14568  if (entry == kNotFound) return GetHeap()->undefined_value();
14569  return get(EntryToIndex(entry) + 1);
14570}
14571
14572
14573Object* CompilationCacheTable::LookupEval(String* src,
14574                                          Context* context,
14575                                          LanguageMode language_mode,
14576                                          int scope_position) {
14577  StringSharedKey key(src,
14578                      context->closure()->shared(),
14579                      language_mode,
14580                      scope_position);
14581  int entry = FindEntry(&key);
14582  if (entry == kNotFound) return GetHeap()->undefined_value();
14583  return get(EntryToIndex(entry) + 1);
14584}
14585
14586
14587Object* CompilationCacheTable::LookupRegExp(String* src,
14588                                            JSRegExp::Flags flags) {
14589  RegExpKey key(src, flags);
14590  int entry = FindEntry(&key);
14591  if (entry == kNotFound) return GetHeap()->undefined_value();
14592  return get(EntryToIndex(entry) + 1);
14593}
14594
14595
14596MaybeObject* CompilationCacheTable::Put(String* src,
14597                                        Context* context,
14598                                        Object* value) {
14599  SharedFunctionInfo* shared = context->closure()->shared();
14600  StringSharedKey key(src,
14601                      shared,
14602                      CurrentGlobalLanguageMode(),
14603                      RelocInfo::kNoPosition);
14604  CompilationCacheTable* cache;
14605  MaybeObject* maybe_cache = EnsureCapacity(1, &key);
14606  if (!maybe_cache->To(&cache)) return maybe_cache;
14607
14608  Object* k;
14609  MaybeObject* maybe_k = key.AsObject(GetHeap());
14610  if (!maybe_k->To(&k)) return maybe_k;
14611
14612  int entry = cache->FindInsertionEntry(key.Hash());
14613  cache->set(EntryToIndex(entry), k);
14614  cache->set(EntryToIndex(entry) + 1, value);
14615  cache->ElementAdded();
14616  return cache;
14617}
14618
14619
// Caches eval compilation output |value| under (src, context's closure,
// value's language mode, scope position).  Returns the (possibly
// reallocated) table, which the caller must re-install.
MaybeObject* CompilationCacheTable::PutEval(String* src,
                                            Context* context,
                                            SharedFunctionInfo* value,
                                            int scope_position) {
  StringSharedKey key(src,
                      context->closure()->shared(),
                      value->language_mode(),
                      scope_position);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // EnsureCapacity may have produced a new table whose map is not yet the
  // compilation cache map, so a checked cast cannot be used here.
  CompilationCacheTable* cache =
      reinterpret_cast<CompilationCacheTable*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());

  Object* k;
  { MaybeObject* maybe_k = key.AsObject(GetHeap());
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }

  cache->set(EntryToIndex(entry), k);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}
14647
14648
// Caches regexp compilation output |value| under (src, flags).  Returns
// the (possibly reallocated) table, which the caller must re-install.
MaybeObject* CompilationCacheTable::PutRegExp(String* src,
                                              JSRegExp::Flags flags,
                                              FixedArray* value) {
  RegExpKey key(src, flags);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // EnsureCapacity may have produced a new table whose map is not yet the
  // compilation cache map, so a checked cast cannot be used here.
  CompilationCacheTable* cache =
      reinterpret_cast<CompilationCacheTable*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());
  // We store the value in the key slot, and compare the search key
  // to the stored value with a custom IsMatch function during lookups.
  cache->set(EntryToIndex(entry), value);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}
14668
14669
14670void CompilationCacheTable::Remove(Object* value) {
14671  Object* the_hole_value = GetHeap()->the_hole_value();
14672  for (int entry = 0, size = Capacity(); entry < size; entry++) {
14673    int entry_index = EntryToIndex(entry);
14674    int value_index = entry_index + 1;
14675    if (get(value_index) == value) {
14676      NoWriteBarrierSet(this, entry_index, the_hole_value);
14677      NoWriteBarrierSet(this, value_index, the_hole_value);
14678      ElementRemoved();
14679    }
14680  }
14681  return;
14682}
14683
14684
14685// StringsKey used for HashTable where key is array of internalized strings.
14686class StringsKey : public HashTableKey {
14687 public:
14688  explicit StringsKey(FixedArray* strings) : strings_(strings) { }
14689
14690  bool IsMatch(Object* strings) {
14691    FixedArray* o = FixedArray::cast(strings);
14692    int len = strings_->length();
14693    if (o->length() != len) return false;
14694    for (int i = 0; i < len; i++) {
14695      if (o->get(i) != strings_->get(i)) return false;
14696    }
14697    return true;
14698  }
14699
14700  uint32_t Hash() { return HashForObject(strings_); }
14701
14702  uint32_t HashForObject(Object* obj) {
14703    FixedArray* strings = FixedArray::cast(obj);
14704    int len = strings->length();
14705    uint32_t hash = 0;
14706    for (int i = 0; i < len; i++) {
14707      hash ^= String::cast(strings->get(i))->Hash();
14708    }
14709    return hash;
14710  }
14711
14712  Object* AsObject(Heap* heap) { return strings_; }
14713
14714 private:
14715  FixedArray* strings_;
14716};
14717
14718
14719Object* MapCache::Lookup(FixedArray* array) {
14720  StringsKey key(array);
14721  int entry = FindEntry(&key);
14722  if (entry == kNotFound) return GetHeap()->undefined_value();
14723  return get(EntryToIndex(entry) + 1);
14724}
14725
14726
// Adds an array-of-names -> map entry to the cache.  Returns the
// (possibly reallocated) cache, or a failure if growing the table
// failed.  GC-unsafe: operates on raw pointers.
MaybeObject* MapCache::Put(FixedArray* array, Map* value) {
  StringsKey key(array);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // EnsureCapacity may have allocated a new, larger table.
  MapCache* cache = reinterpret_cast<MapCache*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), array);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}
14741
14742
// Allocates a new dictionary with room for |at_least_space_for| elements
// and initializes the next enumeration index to kInitialIndex.  Returns
// a failure if the underlying hash table allocation fails.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Allocate(Heap* heap,
                                              int at_least_space_for,
                                              PretenureFlag pretenure) {
  Object* obj;
  { MaybeObject* maybe_obj =
      HashTable<Shape, Key>::Allocate(
          heap,
          at_least_space_for,
          HashTable<Shape, Key>::USE_DEFAULT_MINIMUM_CAPACITY,
          pretenure);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  // Initialize the next enumeration index.
  Dictionary<Shape, Key>::cast(obj)->
      SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
  return obj;
}
14761
14762
// Handle-based wrapper around GenerateNewEnumerationIndices; the
// CALL_HEAP_FUNCTION_VOID macro handles allocation failures/retries
// instead of propagating a MaybeObject failure to the caller.
void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(),
                          dictionary->GenerateNewEnumerationIndices());
}
14768
// Reassigns dense enumeration indices (starting at kInitialIndex) to all
// entries while preserving their existing relative enumeration order.
// Returns |this| on success, or a failure if either temporary array
// cannot be allocated.  GC-unsafe: |this| is a raw pointer, so callers
// must go through a retry wrapper (see DoGenerateNewEnumerationIndices).
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
  Heap* heap = Dictionary<Shape, Key>::GetHeap();
  int length = HashTable<Shape, Key>::NumberOfElements();

  // Allocate and initialize iteration order array.
  Object* obj;
  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  FixedArray* iteration_order = FixedArray::cast(obj);
  for (int i = 0; i < length; i++) {
    iteration_order->set(i, Smi::FromInt(i));
  }

  // Allocate array with enumeration order.
  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  FixedArray* enumeration_order = FixedArray::cast(obj);

  // Fill the enumeration order array with property details.
  int capacity = HashTable<Shape, Key>::Capacity();
  int pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
      int index = DetailsAt(i).dictionary_index();
      enumeration_order->set(pos++, Smi::FromInt(index));
    }
  }

  // Sort the arrays wrt. enumeration order.
  iteration_order->SortPairs(enumeration_order, enumeration_order->length());

  // Overwrite the enumeration_order with the enumeration indices.
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(iteration_order->get(i))->value();
    int enum_index = PropertyDetails::kInitialIndex + i;
    enumeration_order->set(index, Smi::FromInt(enum_index));
  }

  // Update the dictionary with new indices.
  capacity = HashTable<Shape, Key>::Capacity();
  pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
      int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
      PropertyDetails details = DetailsAt(i);
      PropertyDetails new_details = PropertyDetails(
          details.attributes(), details.type(), enum_index);
      DetailsAtPut(i, new_details);
    }
  }

  // Set the next enumeration index.
  SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
  return this;
}
14827
// Ensures the table can hold |n| additional elements.  For enumerable
// shapes, first regenerates enumeration indices when adding |n| entries
// would exceed the valid enumeration-index range.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::EnsureCapacity(int n, Key key) {
  // Check whether there are enough enumeration indices to add n elements.
  if (Shape::kIsEnumerable &&
      !PropertyDetails::IsValidIndex(NextEnumerationIndex() + n)) {
    // If not, we generate new indices for the properties.
    Object* result;
    { MaybeObject* maybe_result = GenerateNewEnumerationIndices();
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
  }
  return HashTable<Shape, Key>::EnsureCapacity(n, key);
}
14841
14842
14843template<typename Shape, typename Key>
14844Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
14845                                               JSReceiver::DeleteMode mode) {
14846  Heap* heap = Dictionary<Shape, Key>::GetHeap();
14847  PropertyDetails details = DetailsAt(entry);
14848  // Ignore attributes if forcing a deletion.
14849  if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
14850    return heap->false_value();
14851  }
14852  SetEntry(entry, heap->the_hole_value(), heap->the_hole_value());
14853  HashTable<Shape, Key>::ElementRemoved();
14854  return heap->true_value();
14855}
14856
14857
// Shrinks the dictionary's backing store when possible by delegating to
// HashTable<Shape, Key>::Shrink.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Shrink(Key key) {
  return HashTable<Shape, Key>::Shrink(key);
}
14862
14863
// Stores |value| under |key|: overwrites the value of an existing entry,
// or adds a new entry with empty property details.  Returns the
// (possibly reallocated) dictionary, or a failure on allocation failure.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) {
  int entry = this->FindEntry(key);

  // If the entry is present set the value.
  if (entry != Dictionary<Shape, Key>::kNotFound) {
    ValueAtPut(entry, value);
    return this;
  }

  // Check whether the dictionary should be extended.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // NOTE(review): |k| is computed but never used -- AddEntry recomputes
  // the key object itself.  Looks like redundant work, but possibly a
  // deliberate early-out on allocation failure; confirm before removing.
  Object* k;
  { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }
  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);

  return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
      Dictionary<Shape, Key>::Hash(key));
}
14889
14890
// Adds a new |key| -> |value| entry with the given property details.
// The key must not already be present.  Returns the (possibly
// reallocated) dictionary, or a failure on allocation failure.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Add(Key key,
                                         Object* value,
                                         PropertyDetails details) {
  // Validate key is absent.
  SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound));
  // Check whether the dictionary should be extended.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
      Dictionary<Shape, Key>::Hash(key));
}
14906
14907
// Add a key, value pair to the dictionary at an empty or deleted entry.
// For enumerable shapes, a live entry without an enumeration index is
// assigned the next available one.  Returns |this|, or a failure if
// creating the key object fails.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key,
                                              Object* value,
                                              PropertyDetails details,
                                              uint32_t hash) {
  // Compute the key object.
  Object* k;
  { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }

  uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash);
  // Insert element at empty or deleted entry
  if (!details.IsDeleted() &&
      details.dictionary_index() == 0 &&
      Shape::kIsEnumerable) {
    // Assign an enumeration index to the property and update
    // SetNextEnumerationIndex.
    int index = NextEnumerationIndex();
    details = PropertyDetails(details.attributes(), details.type(), index);
    SetNextEnumerationIndex(index + 1);
  }
  SetEntry(entry, k, value, details);
  // Dictionary keys are either numbers (element dictionaries) or names.
  ASSERT((Dictionary<Shape, Key>::KeyAt(entry)->IsNumber() ||
          Dictionary<Shape, Key>::KeyAt(entry)->IsName()));
  HashTable<Shape, Key>::ElementAdded();
  return this;
}
14937
14938
// Tracks the largest number key added so far, or switches the dictionary
// into requires-slow-elements mode when |key| exceeds the limit.
void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
  // If the dictionary requires slow elements an element has already
  // been added at a high index.
  if (requires_slow_elements()) return;
  // Check if this index is high enough that we should require slow
  // elements.
  if (key > kRequiresSlowElementsLimit) {
    set_requires_slow_elements();
    return;
  }
  // Update max key value.  The stored smi is shifted left by the tag
  // size so the low bits remain available for the slow-elements flag.
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi() || max_number_key() < key) {
    FixedArray::set(kMaxNumberKeyIndex,
                    Smi::FromInt(key << kRequiresSlowElementsTagSize));
  }
}
14956
14957
// Adds a number-keyed entry; the key must be absent.  Also updates the
// max-number-key bookkeeping used by the slow-elements heuristic.
MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
                                                    Object* value,
                                                    PropertyDetails details) {
  UpdateMaxNumberKey(key);
  SLOW_ASSERT(this->FindEntry(key) == kNotFound);
  return Add(key, value, details);
}
14965
14966
// Adds a number-keyed entry with empty property details; the key must be
// absent.
MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key,
                                                      Object* value) {
  SLOW_ASSERT(this->FindEntry(key) == kNotFound);
  return Add(key, value, PropertyDetails(NONE, NORMAL, 0));
}
14972
14973
// Stores |value| under |key| (add or overwrite), keeping the
// max-number-key bookkeeping up to date.
MaybeObject* SeededNumberDictionary::AtNumberPut(uint32_t key, Object* value) {
  UpdateMaxNumberKey(key);
  return AtPut(key, value);
}
14978
14979
// Stores |value| under |key| (add or overwrite).
MaybeObject* UnseededNumberDictionary::AtNumberPut(uint32_t key,
                                                   Object* value) {
  return AtPut(key, value);
}
14984
14985
// Handle-based wrapper around the raw Set below; the CALL_HEAP_FUNCTION
// macro handles allocation failures/retries and re-wraps the result.
Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t index,
    Handle<Object> value,
    PropertyDetails details) {
  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
                     dictionary->Set(index, *value, details),
                     SeededNumberDictionary);
}
14995
14996
// Handle-based wrapper around the raw Set below; the CALL_HEAP_FUNCTION
// macro handles allocation failures/retries and re-wraps the result.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t index,
    Handle<Object> value) {
  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
                     dictionary->Set(index, *value),
                     UnseededNumberDictionary);
}
15005
15006
// Sets |key| to |value|: adds a new entry when absent, otherwise
// overwrites the existing entry while preserving its enumeration index.
MaybeObject* SeededNumberDictionary::Set(uint32_t key,
                                         Object* value,
                                         PropertyDetails details) {
  int entry = FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(key, value, details);
  // Preserve enumeration index.
  details = PropertyDetails(details.attributes(),
                            details.type(),
                            DetailsAt(entry).dictionary_index());
  MaybeObject* maybe_object_key =
      SeededNumberDictionaryShape::AsObject(GetHeap(), key);
  Object* object_key;
  if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
  SetEntry(entry, object_key, value, details);
  return this;
}
15023
15024
// Sets |key| to |value|: adds a new entry when absent, otherwise
// overwrites the existing entry.
MaybeObject* UnseededNumberDictionary::Set(uint32_t key,
                                           Object* value) {
  int entry = FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(key, value);
  MaybeObject* maybe_object_key =
      UnseededNumberDictionaryShape::AsObject(GetHeap(), key);
  Object* object_key;
  if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
  SetEntry(entry, object_key, value);
  return this;
}
15036
15037
15038
15039template<typename Shape, typename Key>
15040int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes(
15041    PropertyAttributes filter) {
15042  int capacity = HashTable<Shape, Key>::Capacity();
15043  int result = 0;
15044  for (int i = 0; i < capacity; i++) {
15045    Object* k = HashTable<Shape, Key>::KeyAt(i);
15046    if (HashTable<Shape, Key>::IsKey(k) &&
15047        ((filter & SYMBOLIC) == 0 || !k->IsSymbol())) {
15048      PropertyDetails details = DetailsAt(i);
15049      if (details.IsDeleted()) continue;
15050      PropertyAttributes attr = details.attributes();
15051      if ((attr & filter) == 0) result++;
15052    }
15053  }
15054  return result;
15055}
15056
15057
// Returns the number of enumerable (non-DONT_ENUM, non-deleted) elements.
template<typename Shape, typename Key>
int Dictionary<Shape, Key>::NumberOfEnumElements() {
  return NumberOfElementsFilterAttributes(
      static_cast<PropertyAttributes>(DONT_ENUM));
}
15063
15064
15065template<typename Shape, typename Key>
15066void Dictionary<Shape, Key>::CopyKeysTo(
15067    FixedArray* storage,
15068    PropertyAttributes filter,
15069    typename Dictionary<Shape, Key>::SortMode sort_mode) {
15070  ASSERT(storage->length() >= NumberOfEnumElements());
15071  int capacity = HashTable<Shape, Key>::Capacity();
15072  int index = 0;
15073  for (int i = 0; i < capacity; i++) {
15074     Object* k = HashTable<Shape, Key>::KeyAt(i);
15075     if (HashTable<Shape, Key>::IsKey(k)) {
15076       PropertyDetails details = DetailsAt(i);
15077       if (details.IsDeleted()) continue;
15078       PropertyAttributes attr = details.attributes();
15079       if ((attr & filter) == 0) storage->set(index++, k);
15080     }
15081  }
15082  if (sort_mode == Dictionary<Shape, Key>::SORTED) {
15083    storage->SortPairs(storage, index);
15084  }
15085  ASSERT(storage->length() >= index);
15086}
15087
15088
// Copies the enumerable string-named keys into |storage| in enumeration
// order and returns the resulting array.  When fewer keys are written
// than storage->length(), the array is compacted left and right-trimmed;
// an empty result returns the heap's shared empty_fixed_array.
FixedArray* NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
  int length = storage->length();
  ASSERT(length >= NumberOfEnumElements());
  Heap* heap = GetHeap();
  Object* undefined_value = heap->undefined_value();
  int capacity = Capacity();
  int properties = 0;

  // Fill in the enumeration array by assigning enumerable keys at their
  // enumeration index. This will leave holes in the array if there are keys
  // that are deleted or not enumerable.
  for (int i = 0; i < capacity; i++) {
     Object* k = KeyAt(i);
     if (IsKey(k) && !k->IsSymbol()) {
       PropertyDetails details = DetailsAt(i);
       if (details.IsDeleted() || details.IsDontEnum()) continue;
       properties++;
       storage->set(details.dictionary_index() - 1, k);
       if (properties == length) break;
     }
  }

  // There are holes in the enumeration array if less properties were assigned
  // than the length of the array. If so, crunch all the existing properties
  // together by shifting them to the left (maintaining the enumeration order),
  // and trimming of the right side of the array.
  if (properties < length) {
    if (properties == 0) return heap->empty_fixed_array();
    properties = 0;
    for (int i = 0; i < length; ++i) {
      Object* value = storage->get(i);
      if (value != undefined_value) {
        storage->set(properties, value);
        ++properties;
      }
    }
    RightTrimFixedArray<FROM_MUTATOR>(heap, storage, length - properties);
  }
  return storage;
}
15129
15130
15131template<typename Shape, typename Key>
15132void Dictionary<Shape, Key>::CopyKeysTo(
15133    FixedArray* storage,
15134    int index,
15135    PropertyAttributes filter,
15136    typename Dictionary<Shape, Key>::SortMode sort_mode) {
15137  ASSERT(storage->length() >= NumberOfElementsFilterAttributes(
15138      static_cast<PropertyAttributes>(NONE)));
15139  int capacity = HashTable<Shape, Key>::Capacity();
15140  for (int i = 0; i < capacity; i++) {
15141    Object* k = HashTable<Shape, Key>::KeyAt(i);
15142    if (HashTable<Shape, Key>::IsKey(k)) {
15143      PropertyDetails details = DetailsAt(i);
15144      if (details.IsDeleted()) continue;
15145      PropertyAttributes attr = details.attributes();
15146      if ((attr & filter) == 0) storage->set(index++, k);
15147    }
15148  }
15149  if (sort_mode == Dictionary<Shape, Key>::SORTED) {
15150    storage->SortPairs(storage, index);
15151  }
15152  ASSERT(storage->length() >= index);
15153}
15154
15155
15156// Backwards lookup (slow).
15157template<typename Shape, typename Key>
15158Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) {
15159  int capacity = HashTable<Shape, Key>::Capacity();
15160  for (int i = 0; i < capacity; i++) {
15161    Object* k =  HashTable<Shape, Key>::KeyAt(i);
15162    if (Dictionary<Shape, Key>::IsKey(k)) {
15163      Object* e = ValueAt(i);
15164      if (e->IsPropertyCell()) {
15165        e = PropertyCell::cast(e)->value();
15166      }
15167      if (e == value) return k;
15168    }
15169  }
15170  Heap* heap = Dictionary<Shape, Key>::GetHeap();
15171  return heap->undefined_value();
15172}
15173
15174
// Converts |obj|'s slow (dictionary) properties into fast properties:
// builds a descriptor array plus a fixed array for fields that do not
// fit in-object, then installs a new map.  Bails out (returning |obj|
// unchanged) when there are too many properties for a descriptor array.
// Returns a failure if any allocation fails.  GC-unsafe: raw pointers
// throughout, so allocation order matters.
MaybeObject* NameDictionary::TransformPropertiesToFastFor(
    JSObject* obj, int unused_property_fields) {
  // Make sure we preserve dictionary representation if there are too many
  // descriptors.
  int number_of_elements = NumberOfElements();
  if (number_of_elements > DescriptorArray::kMaxNumberOfDescriptors) return obj;

  // Ensure enumeration indices are dense before using them as
  // descriptor positions below.
  if (number_of_elements != NextEnumerationIndex()) {
    MaybeObject* maybe_result = GenerateNewEnumerationIndices();
    if (maybe_result->IsFailure()) return maybe_result;
  }

  int instance_descriptor_length = 0;
  int number_of_fields = 0;

  Heap* heap = GetHeap();

  // Compute the length of the instance descriptor.
  int capacity = Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k)) {
      Object* value = ValueAt(i);
      PropertyType type = DetailsAt(i).type();
      ASSERT(type != FIELD);
      instance_descriptor_length++;
      // Non-function NORMAL properties become fields; functions become
      // constant descriptors (see the fill loop below).
      if (type == NORMAL && !value->IsJSFunction()) {
        number_of_fields += 1;
      }
    }
  }

  int inobject_props = obj->map()->inobject_properties();

  // Allocate new map.
  Map* new_map;
  MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
  new_map->set_dictionary_map(false);

  // Fast path: no properties at all.
  if (instance_descriptor_length == 0) {
    ASSERT_LE(unused_property_fields, inobject_props);
    // Transform the object.
    new_map->set_unused_property_fields(inobject_props);
    obj->set_map(new_map);
    obj->set_properties(heap->empty_fixed_array());
    // Check that it really works.
    ASSERT(obj->HasFastProperties());
    return obj;
  }

  // Allocate the instance descriptor.
  DescriptorArray* descriptors;
  MaybeObject* maybe_descriptors =
      DescriptorArray::Allocate(instance_descriptor_length);
  if (!maybe_descriptors->To(&descriptors)) {
    return maybe_descriptors;
  }

  DescriptorArray::WhitenessWitness witness(descriptors);

  // Fields beyond the in-object slots go into a separate backing array.
  int number_of_allocated_fields =
      number_of_fields + unused_property_fields - inobject_props;
  if (number_of_allocated_fields < 0) {
    // There is enough inobject space for all fields (including unused).
    number_of_allocated_fields = 0;
    unused_property_fields = inobject_props - number_of_fields;
  }

  // Allocate the fixed array for the fields.
  FixedArray* fields;
  MaybeObject* maybe_fields =
      heap->AllocateFixedArray(number_of_allocated_fields);
  if (!maybe_fields->To(&fields)) return maybe_fields;

  // Fill in the instance descriptor and the fields.
  int current_offset = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k)) {
      Object* value = ValueAt(i);
      Name* key;
      if (k->IsSymbol()) {
        key = Symbol::cast(k);
      } else {
        // Ensure the key is a unique name before writing into the
        // instance descriptor.
        MaybeObject* maybe_key = heap->InternalizeString(String::cast(k));
        if (!maybe_key->To(&key)) return maybe_key;
      }

      PropertyDetails details = DetailsAt(i);
      // Enumeration indices are 1-based; descriptor slots are 0-based.
      int enumeration_index = details.dictionary_index();
      PropertyType type = details.type();

      if (value->IsJSFunction()) {
        ConstantDescriptor d(key, value, details.attributes());
        descriptors->Set(enumeration_index - 1, &d, witness);
      } else if (type == NORMAL) {
        // Store the value in-object while slots remain, else in |fields|.
        if (current_offset < inobject_props) {
          obj->InObjectPropertyAtPut(current_offset,
                                     value,
                                     UPDATE_WRITE_BARRIER);
        } else {
          int offset = current_offset - inobject_props;
          fields->set(offset, value);
        }
        FieldDescriptor d(key,
                          current_offset++,
                          details.attributes(),
                          // TODO(verwaest): value->OptimalRepresentation();
                          Representation::Tagged());
        descriptors->Set(enumeration_index - 1, &d, witness);
      } else if (type == CALLBACKS) {
        CallbacksDescriptor d(key,
                              value,
                              details.attributes());
        descriptors->Set(enumeration_index - 1, &d, witness);
      } else {
        UNREACHABLE();
      }
    }
  }
  ASSERT(current_offset == number_of_fields);

  descriptors->Sort();

  new_map->InitializeDescriptors(descriptors);
  new_map->set_unused_property_fields(unused_property_fields);

  // Transform the object.
  obj->set_map(new_map);

  obj->set_properties(fields);
  ASSERT(obj->IsJSObject());

  // Check that it really works.
  ASSERT(obj->HasFastProperties());

  return obj;
}
15316
15317
// Returns true if |key| is present in the set.  Uses OMIT_CREATION so a
// key that never received an identity hash is reported absent without
// allocating one.
bool ObjectHashSet::Contains(Object* key) {
  ASSERT(IsKey(key));

  // If the object does not have an identity hash, it was never used as a key.
  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return false;
  }
  return (FindEntry(key) != kNotFound);
}
15327
15328
// Adds |key| to the set (no-op when already present).  May allocate the
// key's identity hash and may grow the table; returns the (possibly
// reallocated) set, or a failure on allocation failure.
MaybeObject* ObjectHashSet::Add(Object* key) {
  ASSERT(IsKey(key));

  // Make sure the key object has an identity hash code.
  int hash;
  { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
    if (maybe_hash->IsFailure()) return maybe_hash;
    hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
  }
  int entry = FindEntry(key);

  // Check whether key is already present.
  if (entry != kNotFound) return this;

  // Check whether the hash set should be extended and add entry.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  ObjectHashSet* table = ObjectHashSet::cast(obj);
  entry = table->FindInsertionEntry(hash);
  table->set(EntryToIndex(entry), key);
  table->ElementAdded();
  return table;
}
15354
15355
// Removes |key| from the set (no-op when absent) and attempts to shrink
// the table.  Never allocates an identity hash for |key|.
MaybeObject* ObjectHashSet::Remove(Object* key) {
  ASSERT(IsKey(key));

  // If the object does not have an identity hash, it was never used as a key.
  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return this;
  }
  int entry = FindEntry(key);

  // Check whether key is actually present.
  if (entry == kNotFound) return this;

  // Remove entry and try to shrink this hash set.
  set_the_hole(EntryToIndex(entry));
  ElementRemoved();
  return Shrink(key);
}
15373
15374
// Returns the value mapped to |key|, or the hole when absent.  Uses
// OMIT_CREATION so a key without an identity hash is reported absent
// without allocating one.
Object* ObjectHashTable::Lookup(Object* key) {
  ASSERT(IsKey(key));

  // If the object does not have an identity hash, it was never used as a key.
  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) {
      return GetHeap()->the_hole_value();
    }
  }
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToIndex(entry) + 1);
}
15388
15389
// Maps |key| to |value|.  Storing the hole removes the entry (and may
// shrink the table); otherwise the entry is overwritten or added,
// growing the table when needed.  Returns the (possibly reallocated)
// table, or a failure on allocation failure.
MaybeObject* ObjectHashTable::Put(Object* key, Object* value) {
  ASSERT(IsKey(key));

  // Make sure the key object has an identity hash code.
  int hash;
  { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
    if (maybe_hash->IsFailure()) return maybe_hash;
    hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
  }
  int entry = FindEntry(key);

  // Check whether to perform removal operation.
  if (value->IsTheHole()) {
    if (entry == kNotFound) return this;
    RemoveEntry(entry);
    return Shrink(key);
  }

  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    set(EntryToIndex(entry) + 1, value);
    return this;
  }

  // Check whether the hash table should be extended.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  ObjectHashTable* table = ObjectHashTable::cast(obj);
  table->AddEntry(table->FindInsertionEntry(hash), key, value);
  return table;
}
15423
15424
15425void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
15426  set(EntryToIndex(entry), key);
15427  set(EntryToIndex(entry) + 1, value);
15428  ElementAdded();
15429}
15430
15431
15432void ObjectHashTable::RemoveEntry(int entry) {
15433  set_the_hole(EntryToIndex(entry));
15434  set_the_hole(EntryToIndex(entry) + 1);
15435  ElementRemoved();
15436}
15437
15438
// Positions the iterator at the start of the descriptor's serialized
// data blob.
DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
    DeclaredAccessorDescriptor* descriptor)
    : array_(descriptor->serialized_data()->GetDataStartAddress()),
      length_(descriptor->serialized_data()->length()),
      offset_(0) {
}
15445
15446
// Returns a pointer to the next serialized data record and advances the
// iterator by its size.  Must not be called past the end of the data;
// the asserts check the offset stays within the serialized blob and that
// the record is properly aligned for direct reinterpretation.
const DeclaredAccessorDescriptorData*
  DeclaredAccessorDescriptorIterator::Next() {
  ASSERT(offset_ < length_);
  uint8_t* ptr = &array_[offset_];
  ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
  const DeclaredAccessorDescriptorData* data =
      reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
  offset_ += sizeof(*data);
  ASSERT(offset_ <= length_);
  return data;
}
15458
15459
// Creates a new descriptor whose serialized data is |previous|'s data
// (if any) followed by |descriptor|.  Handle-based: the factory handles
// allocation failures.
Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
    Isolate* isolate,
    const DeclaredAccessorDescriptorData& descriptor,
    Handle<DeclaredAccessorDescriptor> previous) {
  int previous_length =
      previous.is_null() ? 0 : previous->serialized_data()->length();
  int length = sizeof(descriptor) + previous_length;
  Handle<ByteArray> serialized_descriptor =
      isolate->factory()->NewByteArray(length);
  Handle<DeclaredAccessorDescriptor> value =
      isolate->factory()->NewDeclaredAccessorDescriptor();
  value->set_serialized_data(*serialized_descriptor);
  // Copy in the data.
  {
    // Raw pointers into the byte array are used below, so allocation
    // must be disallowed for the duration of the copy.
    DisallowHeapAllocation no_allocation;
    uint8_t* array = serialized_descriptor->GetDataStartAddress();
    if (previous_length != 0) {
      uint8_t* previous_array =
          previous->serialized_data()->GetDataStartAddress();
      OS::MemCopy(array, previous_array, previous_length);
      array += previous_length;
    }
    ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
    DeclaredAccessorDescriptorData* data =
        reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
    *data = descriptor;
  }
  return value;
}
15489
15490
15491#ifdef ENABLE_DEBUGGER_SUPPORT
15492// Check if there is a break point at this code position.
15493bool DebugInfo::HasBreakPoint(int code_position) {
15494  // Get the break point info object for this code position.
15495  Object* break_point_info = GetBreakPointInfo(code_position);
15496
15497  // If there is no break point info object or no break points in the break
15498  // point info object there is no break point at this code position.
15499  if (break_point_info->IsUndefined()) return false;
15500  return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
15501}
15502
15503
15504// Get the break point info object for this code position.
15505Object* DebugInfo::GetBreakPointInfo(int code_position) {
15506  // Find the index of the break point info object for this code position.
15507  int index = GetBreakPointInfoIndex(code_position);
15508
15509  // Return the break point info object if any.
15510  if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
15511  return BreakPointInfo::cast(break_points()->get(index));
15512}
15513
15514
// Clear a break point at the specified code position.  No-op when no
// break point info exists for that position.
void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
                                int code_position,
                                Handle<Object> break_point_object) {
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  Isolate::Current())
;
  if (break_point_info->IsUndefined()) return;
  BreakPointInfo::ClearBreakPoint(
      Handle<BreakPointInfo>::cast(break_point_info),
      break_point_object);
}
15526
15527
15528void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
15529                              int code_position,
15530                              int source_position,
15531                              int statement_position,
15532                              Handle<Object> break_point_object) {
15533  Isolate* isolate = Isolate::Current();
15534  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
15535                                  isolate);
15536  if (!break_point_info->IsUndefined()) {
15537    BreakPointInfo::SetBreakPoint(
15538        Handle<BreakPointInfo>::cast(break_point_info),
15539        break_point_object);
15540    return;
15541  }
15542
15543  // Adding a new break point for a code position which did not have any
15544  // break points before. Try to find a free slot.
15545  int index = kNoBreakPointInfo;
15546  for (int i = 0; i < debug_info->break_points()->length(); i++) {
15547    if (debug_info->break_points()->get(i)->IsUndefined()) {
15548      index = i;
15549      break;
15550    }
15551  }
15552  if (index == kNoBreakPointInfo) {
15553    // No free slot - extend break point info array.
15554    Handle<FixedArray> old_break_points =
15555        Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
15556    Handle<FixedArray> new_break_points =
15557        isolate->factory()->NewFixedArray(
15558            old_break_points->length() +
15559            Debug::kEstimatedNofBreakPointsInFunction);
15560
15561    debug_info->set_break_points(*new_break_points);
15562    for (int i = 0; i < old_break_points->length(); i++) {
15563      new_break_points->set(i, old_break_points->get(i));
15564    }
15565    index = old_break_points->length();
15566  }
15567  ASSERT(index != kNoBreakPointInfo);
15568
15569  // Allocate new BreakPointInfo object and set the break point.
15570  Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
15571      isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
15572  new_break_point_info->set_code_position(Smi::FromInt(code_position));
15573  new_break_point_info->set_source_position(Smi::FromInt(source_position));
15574  new_break_point_info->
15575      set_statement_position(Smi::FromInt(statement_position));
15576  new_break_point_info->set_break_point_objects(
15577      isolate->heap()->undefined_value());
15578  BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
15579  debug_info->break_points()->set(index, *new_break_point_info);
15580}
15581
15582
15583// Get the break point objects for a code position.
15584Object* DebugInfo::GetBreakPointObjects(int code_position) {
15585  Object* break_point_info = GetBreakPointInfo(code_position);
15586  if (break_point_info->IsUndefined()) {
15587    return GetHeap()->undefined_value();
15588  }
15589  return BreakPointInfo::cast(break_point_info)->break_point_objects();
15590}
15591
15592
15593// Get the total number of break points.
15594int DebugInfo::GetBreakPointCount() {
15595  if (break_points()->IsUndefined()) return 0;
15596  int count = 0;
15597  for (int i = 0; i < break_points()->length(); i++) {
15598    if (!break_points()->get(i)->IsUndefined()) {
15599      BreakPointInfo* break_point_info =
15600          BreakPointInfo::cast(break_points()->get(i));
15601      count += break_point_info->GetBreakPointCount();
15602    }
15603  }
15604  return count;
15605}
15606
15607
15608Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
15609                                      Handle<Object> break_point_object) {
15610  Heap* heap = debug_info->GetHeap();
15611  if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
15612  for (int i = 0; i < debug_info->break_points()->length(); i++) {
15613    if (!debug_info->break_points()->get(i)->IsUndefined()) {
15614      Handle<BreakPointInfo> break_point_info =
15615          Handle<BreakPointInfo>(BreakPointInfo::cast(
15616              debug_info->break_points()->get(i)));
15617      if (BreakPointInfo::HasBreakPointObject(break_point_info,
15618                                              break_point_object)) {
15619        return *break_point_info;
15620      }
15621    }
15622  }
15623  return heap->undefined_value();
15624}
15625
15626
15627// Find the index of the break point info object for the specified code
15628// position.
15629int DebugInfo::GetBreakPointInfoIndex(int code_position) {
15630  if (break_points()->IsUndefined()) return kNoBreakPointInfo;
15631  for (int i = 0; i < break_points()->length(); i++) {
15632    if (!break_points()->get(i)->IsUndefined()) {
15633      BreakPointInfo* break_point_info =
15634          BreakPointInfo::cast(break_points()->get(i));
15635      if (break_point_info->code_position()->value() == code_position) {
15636        return i;
15637      }
15638    }
15639  }
15640  return kNoBreakPointInfo;
15641}
15642
15643
15644// Remove the specified break point object.
15645void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
15646                                     Handle<Object> break_point_object) {
15647  Isolate* isolate = Isolate::Current();
15648  // If there are no break points just ignore.
15649  if (break_point_info->break_point_objects()->IsUndefined()) return;
15650  // If there is a single break point clear it if it is the same.
15651  if (!break_point_info->break_point_objects()->IsFixedArray()) {
15652    if (break_point_info->break_point_objects() == *break_point_object) {
15653      break_point_info->set_break_point_objects(
15654          isolate->heap()->undefined_value());
15655    }
15656    return;
15657  }
15658  // If there are multiple break points shrink the array
15659  ASSERT(break_point_info->break_point_objects()->IsFixedArray());
15660  Handle<FixedArray> old_array =
15661      Handle<FixedArray>(
15662          FixedArray::cast(break_point_info->break_point_objects()));
15663  Handle<FixedArray> new_array =
15664      isolate->factory()->NewFixedArray(old_array->length() - 1);
15665  int found_count = 0;
15666  for (int i = 0; i < old_array->length(); i++) {
15667    if (old_array->get(i) == *break_point_object) {
15668      ASSERT(found_count == 0);
15669      found_count++;
15670    } else {
15671      new_array->set(i - found_count, old_array->get(i));
15672    }
15673  }
15674  // If the break point was found in the list change it.
15675  if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
15676}
15677
15678
15679// Add the specified break point object.
15680void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
15681                                   Handle<Object> break_point_object) {
15682  Isolate* isolate = break_point_info->GetIsolate();
15683
15684  // If there was no break point objects before just set it.
15685  if (break_point_info->break_point_objects()->IsUndefined()) {
15686    break_point_info->set_break_point_objects(*break_point_object);
15687    return;
15688  }
15689  // If the break point object is the same as before just ignore.
15690  if (break_point_info->break_point_objects() == *break_point_object) return;
15691  // If there was one break point object before replace with array.
15692  if (!break_point_info->break_point_objects()->IsFixedArray()) {
15693    Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
15694    array->set(0, break_point_info->break_point_objects());
15695    array->set(1, *break_point_object);
15696    break_point_info->set_break_point_objects(*array);
15697    return;
15698  }
15699  // If there was more than one break point before extend array.
15700  Handle<FixedArray> old_array =
15701      Handle<FixedArray>(
15702          FixedArray::cast(break_point_info->break_point_objects()));
15703  Handle<FixedArray> new_array =
15704      isolate->factory()->NewFixedArray(old_array->length() + 1);
15705  for (int i = 0; i < old_array->length(); i++) {
15706    // If the break point was there before just ignore.
15707    if (old_array->get(i) == *break_point_object) return;
15708    new_array->set(i, old_array->get(i));
15709  }
15710  // Add the new break point.
15711  new_array->set(old_array->length(), *break_point_object);
15712  break_point_info->set_break_point_objects(*new_array);
15713}
15714
15715
15716bool BreakPointInfo::HasBreakPointObject(
15717    Handle<BreakPointInfo> break_point_info,
15718    Handle<Object> break_point_object) {
15719  // No break point.
15720  if (break_point_info->break_point_objects()->IsUndefined()) return false;
15721  // Single break point.
15722  if (!break_point_info->break_point_objects()->IsFixedArray()) {
15723    return break_point_info->break_point_objects() == *break_point_object;
15724  }
15725  // Multiple break points.
15726  FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
15727  for (int i = 0; i < array->length(); i++) {
15728    if (array->get(i) == *break_point_object) {
15729      return true;
15730    }
15731  }
15732  return false;
15733}
15734
15735
15736// Get the number of break points.
15737int BreakPointInfo::GetBreakPointCount() {
15738  // No break point.
15739  if (break_point_objects()->IsUndefined()) return 0;
15740  // Single break point.
15741  if (!break_point_objects()->IsFixedArray()) return 1;
15742  // Multiple break points.
15743  return FixedArray::cast(break_point_objects())->length();
15744}
15745#endif  // ENABLE_DEBUGGER_SUPPORT
15746
15747
15748Object* JSDate::GetField(Object* object, Smi* index) {
15749  return JSDate::cast(object)->DoGetField(
15750      static_cast<FieldIndex>(index->value()));
15751}
15752
15753
// Compute a single date field from the date's primitive time value, using
// the per-isolate DateCache and the local-time fields cached on the object
// where possible.
Object* JSDate::DoGetField(FieldIndex index) {
  // The raw time value is accessed through value(), not through this path.
  ASSERT(index != kDateValue);

  DateCache* date_cache = GetIsolate()->date_cache();

  if (index < kFirstUncachedField) {
    // Cached local fields (year/month/day/weekday/hour/min/sec). Refill the
    // cache when the stamp is stale. A non-Smi stamp is the NaN sentinel set
    // by SetValue for invalid dates; in that case the cached NaN fields must
    // be returned as-is and never overwritten.
    Object* stamp = cache_stamp();
    if (stamp != date_cache->stamp() && stamp->IsSmi()) {
      // Since the stamp is not NaN, the value is also not NaN.
      int64_t local_time_ms =
          date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
      SetLocalFields(local_time_ms, date_cache);
    }
    switch (index) {
      case kYear: return year();
      case kMonth: return month();
      case kDay: return day();
      case kWeekday: return weekday();
      case kHour: return hour();
      case kMinute: return min();
      case kSecond: return sec();
      default: UNREACHABLE();
    }
  }

  // UTC fields are computed on demand, without using the field cache.
  if (index >= kFirstUTCField) {
    return GetUTCField(index, value()->Number(), date_cache);
  }

  // Remaining local fields (kDays, kMillisecond, kTimeInDay) are derived
  // directly from the local time; an invalid date yields NaN.
  double time = value()->Number();
  if (std::isnan(time)) return GetIsolate()->heap()->nan_value();

  int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
  int days = DateCache::DaysFromTime(local_time_ms);

  if (index == kDays) return Smi::FromInt(days);

  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
  ASSERT(index == kTimeInDay);
  return Smi::FromInt(time_in_day_ms);
}
15796
15797
15798Object* JSDate::GetUTCField(FieldIndex index,
15799                            double value,
15800                            DateCache* date_cache) {
15801  ASSERT(index >= kFirstUTCField);
15802
15803  if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
15804
15805  int64_t time_ms = static_cast<int64_t>(value);
15806
15807  if (index == kTimezoneOffset) {
15808    return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
15809  }
15810
15811  int days = DateCache::DaysFromTime(time_ms);
15812
15813  if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
15814
15815  if (index <= kDayUTC) {
15816    int year, month, day;
15817    date_cache->YearMonthDayFromDays(days, &year, &month, &day);
15818    if (index == kYearUTC) return Smi::FromInt(year);
15819    if (index == kMonthUTC) return Smi::FromInt(month);
15820    ASSERT(index == kDayUTC);
15821    return Smi::FromInt(day);
15822  }
15823
15824  int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
15825  switch (index) {
15826    case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
15827    case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
15828    case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
15829    case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
15830    case kDaysUTC: return Smi::FromInt(days);
15831    case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
15832    default: UNREACHABLE();
15833  }
15834
15835  UNREACHABLE();
15836  return NULL;
15837}
15838
15839
15840void JSDate::SetValue(Object* value, bool is_value_nan) {
15841  set_value(value);
15842  if (is_value_nan) {
15843    HeapNumber* nan = GetIsolate()->heap()->nan_value();
15844    set_cache_stamp(nan, SKIP_WRITE_BARRIER);
15845    set_year(nan, SKIP_WRITE_BARRIER);
15846    set_month(nan, SKIP_WRITE_BARRIER);
15847    set_day(nan, SKIP_WRITE_BARRIER);
15848    set_hour(nan, SKIP_WRITE_BARRIER);
15849    set_min(nan, SKIP_WRITE_BARRIER);
15850    set_sec(nan, SKIP_WRITE_BARRIER);
15851    set_weekday(nan, SKIP_WRITE_BARRIER);
15852  } else {
15853    set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
15854  }
15855}
15856
15857
15858void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
15859  int days = DateCache::DaysFromTime(local_time_ms);
15860  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
15861  int year, month, day;
15862  date_cache->YearMonthDayFromDays(days, &year, &month, &day);
15863  int weekday = date_cache->Weekday(days);
15864  int hour = time_in_day_ms / (60 * 60 * 1000);
15865  int min = (time_in_day_ms / (60 * 1000)) % 60;
15866  int sec = (time_in_day_ms / 1000) % 60;
15867  set_cache_stamp(date_cache->stamp());
15868  set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
15869  set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
15870  set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
15871  set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
15872  set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
15873  set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
15874  set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
15875}
15876
15877
15878void JSArrayBuffer::Neuter() {
15879  ASSERT(is_external());
15880  set_backing_store(NULL);
15881  set_byte_length(Smi::FromInt(0));
15882}
15883
15884
15885void JSArrayBufferView::NeuterView() {
15886  set_byte_offset(Smi::FromInt(0));
15887  set_byte_length(Smi::FromInt(0));
15888}
15889
15890
// Neutering a data view only requires clearing the shared view fields
// (byte offset and byte length).
void JSDataView::Neuter() {
  NeuterView();
}
15894
15895
// Neuter a typed array: clear the shared view fields, then additionally
// drop the element count and swap in the map-appropriate empty external
// elements array.
void JSTypedArray::Neuter() {
  NeuterView();
  set_length(Smi::FromInt(0));
  set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
}
15901
15902
// The cell's tracked type is stored in an untyped raw slot; cast on access.
Type* PropertyCell::type() {
  return static_cast<Type*>(type_raw());
}
15906
15907
// Store the tracked type into the raw slot, forwarding the caller's write
// barrier mode.
void PropertyCell::set_type(Type* type, WriteBarrierMode ignored) {
  ASSERT(IsPropertyCell());
  set_type_raw(type, ignored);
}
15912
15913
15914Type* PropertyCell::UpdateType(Handle<PropertyCell> cell,
15915                               Handle<Object> value) {
15916  Isolate* isolate = cell->GetIsolate();
15917  Handle<Type> old_type(cell->type(), isolate);
15918  // TODO(2803): Do not track ConsString as constant because they cannot be
15919  // embedded into code.
15920  Handle<Type> new_type(value->IsConsString() || value->IsTheHole()
15921                        ? Type::Any()
15922                        : Type::Constant(value, isolate), isolate);
15923
15924  if (new_type->Is(old_type)) {
15925    return *old_type;
15926  }
15927
15928  cell->dependent_code()->DeoptimizeDependentCodeGroup(
15929      isolate, DependentCode::kPropertyCellChangedGroup);
15930
15931  if (old_type->Is(Type::None()) || old_type->Is(Type::Undefined())) {
15932    return *new_type;
15933  }
15934
15935  return Type::Any();
15936}
15937
15938
// Store |value| in the cell and widen the cell's tracked type to cover it.
// Returns |value| on success, or the failure from the type update (the type
// is left unchanged in that case).
MaybeObject* PropertyCell::SetValueInferType(Object* value,
                                             WriteBarrierMode ignored) {
  set_value(value, ignored);
  // Once the type is Any it can never change again, so skip the update.
  if (!Type::Any()->Is(type())) {
    // UpdateType needs handles; run it through the trampoline since this
    // raw-pointer method may be called without a handle scope.
    // NOTE(review): relies on UpdateType being idempotent, per the
    // trampoline's name — confirm against its definition.
    IdempotentPointerToHandleCodeTrampoline trampoline(GetIsolate());
    MaybeObject* maybe_type = trampoline.CallWithReturnValue(
        &PropertyCell::UpdateType,
        Handle<PropertyCell>(this),
        Handle<Object>(value, GetIsolate()));
    Type* new_type = NULL;
    if (!maybe_type->To(&new_type)) return maybe_type;
    set_type(new_type);
  }
  return value;
}
15954
15955
15956void PropertyCell::AddDependentCompilationInfo(CompilationInfo* info) {
15957  Handle<DependentCode> dep(dependent_code());
15958  Handle<DependentCode> codes =
15959      DependentCode::Insert(dep, DependentCode::kPropertyCellChangedGroup,
15960                            info->object_wrapper());
15961  if (*codes != dependent_code()) set_dependent_code(*codes);
15962  info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
15963      Handle<HeapObject>(this), info->zone());
15964}
15965
15966
15967void PropertyCell::AddDependentCode(Handle<Code> code) {
15968  Handle<DependentCode> codes = DependentCode::Insert(
15969      Handle<DependentCode>(dependent_code()),
15970      DependentCode::kPropertyCellChangedGroup, code);
15971  if (*codes != dependent_code()) set_dependent_code(*codes);
15972}
15973
15974
15975const char* GetBailoutReason(BailoutReason reason) {
15976  ASSERT(reason < kLastErrorMessage);
15977#define ERROR_MESSAGES_TEXTS(C, T) T,
15978  static const char* error_messages_[] = {
15979      ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
15980  };
15981#undef ERROR_MESSAGES_TEXTS
15982  return error_messages_[reason];
15983}
15984
15985
15986} }  // namespace v8::internal
15987