1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_IA32
6
7#include "src/code-stubs.h"
8#include "src/api-arguments.h"
9#include "src/base/bits.h"
10#include "src/bootstrapper.h"
11#include "src/codegen.h"
12#include "src/ia32/code-stubs-ia32.h"
13#include "src/ia32/frames-ia32.h"
14#include "src/ic/handler-compiler.h"
15#include "src/ic/ic.h"
16#include "src/ic/stub-cache.h"
17#include "src/isolate.h"
18#include "src/regexp/jsregexp.h"
19#include "src/regexp/regexp-macro-assembler.h"
20#include "src/runtime/runtime.h"
21
22namespace v8 {
23namespace internal {
24
25#define __ ACCESS_MASM(masm)
26
27void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
28  __ pop(ecx);
29  __ mov(MemOperand(esp, eax, times_4, 0), edi);
30  __ push(edi);
31  __ push(ebx);
32  __ push(ecx);
33  __ add(eax, Immediate(3));
34  __ TailCallRuntime(Runtime::kNewArray);
35}
36
37void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
38                                               ExternalReference miss) {
39  // Update the static counter each time a new code stub is generated.
40  isolate()->counters()->code_stubs()->Increment();
41
42  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
43  int param_count = descriptor.GetRegisterParameterCount();
44  {
45    // Call the runtime system in a fresh internal frame.
46    FrameScope scope(masm, StackFrame::INTERNAL);
47    DCHECK(param_count == 0 ||
48           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
49    // Push arguments
50    for (int i = 0; i < param_count; ++i) {
51      __ push(descriptor.GetRegisterParameter(i));
52    }
53    __ CallExternalReference(miss, param_count);
54  }
55
56  __ ret(0);
57}
58
59
60void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
61  // We don't allow a GC during a store buffer overflow so there is no need to
62  // store the registers in any particular way, but we do have to store and
63  // restore them.
64  __ pushad();
65  if (save_doubles()) {
66    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
67    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
68      XMMRegister reg = XMMRegister::from_code(i);
69      __ movsd(Operand(esp, i * kDoubleSize), reg);
70    }
71  }
72  const int argument_count = 1;
73
74  AllowExternalCallThatCantCauseGC scope(masm);
75  __ PrepareCallCFunction(argument_count, ecx);
76  __ mov(Operand(esp, 0 * kPointerSize),
77         Immediate(ExternalReference::isolate_address(isolate())));
78  __ CallCFunction(
79      ExternalReference::store_buffer_overflow_function(isolate()),
80      argument_count);
81  if (save_doubles()) {
82    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
83      XMMRegister reg = XMMRegister::from_code(i);
84      __ movsd(reg, Operand(esp, i * kDoubleSize));
85    }
86    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
87  }
88  __ popad();
89  __ ret(0);
90}
91
92
93class FloatingPointHelper : public AllStatic {
94 public:
95  enum ArgLocation {
96    ARGS_ON_STACK,
97    ARGS_IN_REGISTERS
98  };
99
100  // Code pattern for loading a floating point value. Input value must
101  // be either a smi or a heap number object (fp value). Requirements:
102  // operand in register number. Returns operand as floating point number
103  // on FPU stack.
104  static void LoadFloatOperand(MacroAssembler* masm, Register number);
105
106  // Test if operands are smi or number objects (fp). Requirements:
107  // operand_1 in eax, operand_2 in edx; falls through on float
108  // operands, jumps to the non_float label otherwise.
109  static void CheckFloatOperands(MacroAssembler* masm,
110                                 Label* non_float,
111                                 Register scratch);
112
113  // Test if operands are numbers (smi or HeapNumber objects), and load
114  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
115  // either operand is not a number.  Operands are in edx and eax.
116  // Leaves operands unchanged.
117  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
118};
119
120
121void DoubleToIStub::Generate(MacroAssembler* masm) {
122  Register input_reg = this->source();
123  Register final_result_reg = this->destination();
124  DCHECK(is_truncating());
125
126  Label check_negative, process_64_bits, done, done_no_stash;
127
128  int double_offset = offset();
129
130  // Account for return address and saved regs if input is esp.
131  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;
132
133  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
134  MemOperand exponent_operand(MemOperand(input_reg,
135                                         double_offset + kDoubleSize / 2));
136
137  Register scratch1;
138  {
139    Register scratch_candidates[3] = { ebx, edx, edi };
140    for (int i = 0; i < 3; i++) {
141      scratch1 = scratch_candidates[i];
142      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
143    }
144  }
145  // Since we must use ecx for shifts below, use some other register (eax)
146  // to calculate the result if ecx is the requested return register.
147  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
148  // Save ecx if it isn't the return register and therefore volatile, or if it
149  // is the return register, then save the temp register we use in its stead for
150  // the result.
151  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
152  __ push(scratch1);
153  __ push(save_reg);
154
155  bool stash_exponent_copy = !input_reg.is(esp);
156  __ mov(scratch1, mantissa_operand);
157  if (CpuFeatures::IsSupported(SSE3)) {
158    CpuFeatureScope scope(masm, SSE3);
159    // Load x87 register with heap number.
160    __ fld_d(mantissa_operand);
161  }
162  __ mov(ecx, exponent_operand);
163  if (stash_exponent_copy) __ push(ecx);
164
165  __ and_(ecx, HeapNumber::kExponentMask);
166  __ shr(ecx, HeapNumber::kExponentShift);
167  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
168  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
169  __ j(below, &process_64_bits);
170
171  // Result is entirely in lower 32-bits of mantissa
172  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
173  if (CpuFeatures::IsSupported(SSE3)) {
174    __ fstp(0);
175  }
176  __ sub(ecx, Immediate(delta));
177  __ xor_(result_reg, result_reg);
178  __ cmp(ecx, Immediate(31));
179  __ j(above, &done);
180  __ shl_cl(scratch1);
181  __ jmp(&check_negative);
182
183  __ bind(&process_64_bits);
184  if (CpuFeatures::IsSupported(SSE3)) {
185    CpuFeatureScope scope(masm, SSE3);
186    if (stash_exponent_copy) {
187      // Already a copy of the exponent on the stack, overwrite it.
188      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
189      __ sub(esp, Immediate(kDoubleSize / 2));
190    } else {
191      // Reserve space for 64 bit answer.
192      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
193    }
194    // Do conversion, which cannot fail because we checked the exponent.
195    __ fisttp_d(Operand(esp, 0));
196    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
197    __ add(esp, Immediate(kDoubleSize));
198    __ jmp(&done_no_stash);
199  } else {
200    // Result must be extracted from shifted 32-bit mantissa
201    __ sub(ecx, Immediate(delta));
202    __ neg(ecx);
203    if (stash_exponent_copy) {
204      __ mov(result_reg, MemOperand(esp, 0));
205    } else {
206      __ mov(result_reg, exponent_operand);
207    }
208    __ and_(result_reg,
209            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
210    __ add(result_reg,
211           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
212    __ shrd_cl(scratch1, result_reg);
213    __ shr_cl(result_reg);
214    __ test(ecx, Immediate(32));
215    __ cmov(not_equal, scratch1, result_reg);
216  }
217
218  // If the double was negative, negate the integer result.
219  __ bind(&check_negative);
220  __ mov(result_reg, scratch1);
221  __ neg(result_reg);
222  if (stash_exponent_copy) {
223    __ cmp(MemOperand(esp, 0), Immediate(0));
224  } else {
225    __ cmp(exponent_operand, Immediate(0));
226  }
227    __ cmov(greater, result_reg, scratch1);
228
229  // Restore registers
230  __ bind(&done);
231  if (stash_exponent_copy) {
232    __ add(esp, Immediate(kDoubleSize / 2));
233  }
234  __ bind(&done_no_stash);
235  if (!final_result_reg.is(result_reg)) {
236    DCHECK(final_result_reg.is(ecx));
237    __ mov(final_result_reg, result_reg);
238  }
239  __ pop(save_reg);
240  __ pop(scratch1);
241  __ ret(0);
242}
243
244
245void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
246                                           Register number) {
247  Label load_smi, done;
248
249  __ JumpIfSmi(number, &load_smi, Label::kNear);
250  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
251  __ jmp(&done, Label::kNear);
252
253  __ bind(&load_smi);
254  __ SmiUntag(number);
255  __ push(number);
256  __ fild_s(Operand(esp, 0));
257  __ pop(number);
258
259  __ bind(&done);
260}
261
262
263void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
264                                           Label* not_numbers) {
265  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
266  // Load operand in edx into xmm0, or branch to not_numbers.
267  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
268  Factory* factory = masm->isolate()->factory();
269  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
270  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
271  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
272  __ bind(&load_eax);
273  // Load operand in eax into xmm1, or branch to not_numbers.
274  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
275  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
276  __ j(equal, &load_float_eax, Label::kNear);
277  __ jmp(not_numbers);  // Argument in eax is not a number.
278  __ bind(&load_smi_edx);
279  __ SmiUntag(edx);  // Untag smi before converting to float.
280  __ Cvtsi2sd(xmm0, edx);
281  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
282  __ jmp(&load_eax);
283  __ bind(&load_smi_eax);
284  __ SmiUntag(eax);  // Untag smi before converting to float.
285  __ Cvtsi2sd(xmm1, eax);
286  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
287  __ jmp(&done, Label::kNear);
288  __ bind(&load_float_eax);
289  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
290  __ bind(&done);
291}
292
293
294void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
295                                             Label* non_float,
296                                             Register scratch) {
297  Label test_other, done;
298  // Test if both operands are floats or smi -> scratch=k_is_float;
299  // Otherwise scratch = k_not_float.
300  __ JumpIfSmi(edx, &test_other, Label::kNear);
301  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
302  Factory* factory = masm->isolate()->factory();
303  __ cmp(scratch, factory->heap_number_map());
304  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN
305
306  __ bind(&test_other);
307  __ JumpIfSmi(eax, &done, Label::kNear);
308  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
309  __ cmp(scratch, factory->heap_number_map());
310  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN
311
312  // Fall-through: Both operands are numbers.
313  __ bind(&done);
314}
315
316
317void MathPowStub::Generate(MacroAssembler* masm) {
318  const Register exponent = MathPowTaggedDescriptor::exponent();
319  DCHECK(exponent.is(eax));
320  const Register scratch = ecx;
321  const XMMRegister double_result = xmm3;
322  const XMMRegister double_base = xmm2;
323  const XMMRegister double_exponent = xmm1;
324  const XMMRegister double_scratch = xmm4;
325
326  Label call_runtime, done, exponent_not_smi, int_exponent;
327
328  // Save 1 in double_result - we need this several times later on.
329  __ mov(scratch, Immediate(1));
330  __ Cvtsi2sd(double_result, scratch);
331
332  if (exponent_type() == TAGGED) {
333    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
334    __ SmiUntag(exponent);
335    __ jmp(&int_exponent);
336
337    __ bind(&exponent_not_smi);
338    __ movsd(double_exponent,
339              FieldOperand(exponent, HeapNumber::kValueOffset));
340  }
341
342  if (exponent_type() != INTEGER) {
343    Label fast_power, try_arithmetic_simplification;
344    __ DoubleToI(exponent, double_exponent, double_scratch,
345                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
346                 &try_arithmetic_simplification,
347                 &try_arithmetic_simplification);
348    __ jmp(&int_exponent);
349
350    __ bind(&try_arithmetic_simplification);
351    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
352    __ cvttsd2si(exponent, Operand(double_exponent));
353    __ cmp(exponent, Immediate(0x1));
354    __ j(overflow, &call_runtime);
355
356    // Using FPU instructions to calculate power.
357    Label fast_power_failed;
358    __ bind(&fast_power);
359    __ fnclex();  // Clear flags to catch exceptions later.
360    // Transfer (B)ase and (E)xponent onto the FPU register stack.
361    __ sub(esp, Immediate(kDoubleSize));
362    __ movsd(Operand(esp, 0), double_exponent);
363    __ fld_d(Operand(esp, 0));  // E
364    __ movsd(Operand(esp, 0), double_base);
365    __ fld_d(Operand(esp, 0));  // B, E
366
367    // Exponent is in st(1) and base is in st(0)
368    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
369    // FYL2X calculates st(1) * log2(st(0))
370    __ fyl2x();    // X
371    __ fld(0);     // X, X
372    __ frndint();  // rnd(X), X
373    __ fsub(1);    // rnd(X), X-rnd(X)
374    __ fxch(1);    // X - rnd(X), rnd(X)
375    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
376    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
377    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
378    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
379    // FSCALE calculates st(0) * 2^st(1)
380    __ fscale();   // 2^X, rnd(X)
381    __ fstp(1);    // 2^X
382    // Bail out to runtime in case of exceptions in the status word.
383    __ fnstsw_ax();
384    __ test_b(eax,
385              Immediate(0x5F));  // We check for all but precision exception.
386    __ j(not_zero, &fast_power_failed, Label::kNear);
387    __ fstp_d(Operand(esp, 0));
388    __ movsd(double_result, Operand(esp, 0));
389    __ add(esp, Immediate(kDoubleSize));
390    __ jmp(&done);
391
392    __ bind(&fast_power_failed);
393    __ fninit();
394    __ add(esp, Immediate(kDoubleSize));
395    __ jmp(&call_runtime);
396  }
397
398  // Calculate power with integer exponent.
399  __ bind(&int_exponent);
400  const XMMRegister double_scratch2 = double_exponent;
401  __ mov(scratch, exponent);  // Back up exponent.
402  __ movsd(double_scratch, double_base);  // Back up base.
403  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.
404
405  // Get absolute value of exponent.
406  Label no_neg, while_true, while_false;
407  __ test(scratch, scratch);
408  __ j(positive, &no_neg, Label::kNear);
409  __ neg(scratch);
410  __ bind(&no_neg);
411
412  __ j(zero, &while_false, Label::kNear);
413  __ shr(scratch, 1);
414  // Above condition means CF==0 && ZF==0.  This means that the
415  // bit that has been shifted out is 0 and the result is not 0.
416  __ j(above, &while_true, Label::kNear);
417  __ movsd(double_result, double_scratch);
418  __ j(zero, &while_false, Label::kNear);
419
420  __ bind(&while_true);
421  __ shr(scratch, 1);
422  __ mulsd(double_scratch, double_scratch);
423  __ j(above, &while_true, Label::kNear);
424  __ mulsd(double_result, double_scratch);
425  __ j(not_zero, &while_true);
426
427  __ bind(&while_false);
428  // scratch has the original value of the exponent - if the exponent is
429  // negative, return 1/result.
430  __ test(exponent, exponent);
431  __ j(positive, &done);
432  __ divsd(double_scratch2, double_result);
433  __ movsd(double_result, double_scratch2);
434  // Test whether result is zero.  Bail out to check for subnormal result.
435  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
436  __ xorps(double_scratch2, double_scratch2);
437  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
438  // double_exponent aliased as double_scratch2 has already been overwritten
439  // and may not have contained the exponent value in the first place when the
440  // exponent is a smi.  We reset it with exponent value before bailing out.
441  __ j(not_equal, &done);
442  __ Cvtsi2sd(double_exponent, exponent);
443
444  // Returning or bailing out.
445  __ bind(&call_runtime);
446  {
447    AllowExternalCallThatCantCauseGC scope(masm);
448    __ PrepareCallCFunction(4, scratch);
449    __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
450    __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
451    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
452                     4);
453  }
454  // Return value is in st(0) on ia32.
455  // Store it into the (fixed) result register.
456  __ sub(esp, Immediate(kDoubleSize));
457  __ fstp_d(Operand(esp, 0));
458  __ movsd(double_result, Operand(esp, 0));
459  __ add(esp, Immediate(kDoubleSize));
460
461  __ bind(&done);
462  __ ret(0);
463}
464
465void RegExpExecStub::Generate(MacroAssembler* masm) {
466  // Just jump directly to runtime if native RegExp is not selected at compile
467  // time or if regexp entry in generated code is turned off runtime switch or
468  // at compilation.
469#ifdef V8_INTERPRETED_REGEXP
470  __ TailCallRuntime(Runtime::kRegExpExec);
471#else  // V8_INTERPRETED_REGEXP
472
473  // Stack frame on entry.
474  //  esp[0]: return address
475  //  esp[4]: last_match_info (expected JSArray)
476  //  esp[8]: previous index
477  //  esp[12]: subject string
478  //  esp[16]: JSRegExp object
479
480  static const int kLastMatchInfoOffset = 1 * kPointerSize;
481  static const int kPreviousIndexOffset = 2 * kPointerSize;
482  static const int kSubjectOffset = 3 * kPointerSize;
483  static const int kJSRegExpOffset = 4 * kPointerSize;
484
485  Label runtime;
486  Factory* factory = isolate()->factory();
487
488  // Ensure that a RegExp stack is allocated.
489  ExternalReference address_of_regexp_stack_memory_address =
490      ExternalReference::address_of_regexp_stack_memory_address(isolate());
491  ExternalReference address_of_regexp_stack_memory_size =
492      ExternalReference::address_of_regexp_stack_memory_size(isolate());
493  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
494  __ test(ebx, ebx);
495  __ j(zero, &runtime);
496
497  // Check that the first argument is a JSRegExp object.
498  __ mov(eax, Operand(esp, kJSRegExpOffset));
499  STATIC_ASSERT(kSmiTag == 0);
500  __ JumpIfSmi(eax, &runtime);
501  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
502  __ j(not_equal, &runtime);
503
504  // Check that the RegExp has been compiled (data contains a fixed array).
505  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
506  if (FLAG_debug_code) {
507    __ test(ecx, Immediate(kSmiTagMask));
508    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
509    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
510    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
511  }
512
513  // ecx: RegExp data (FixedArray)
514  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
515  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
516  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
517  __ j(not_equal, &runtime);
518
519  // ecx: RegExp data (FixedArray)
520  // Check that the number of captures fit in the static offsets vector buffer.
521  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
522  // Check (number_of_captures + 1) * 2 <= offsets vector size
523  // Or          number_of_captures * 2 <= offsets vector size - 2
524  // Multiplying by 2 comes for free since edx is smi-tagged.
525  STATIC_ASSERT(kSmiTag == 0);
526  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
527  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
528  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
529  __ j(above, &runtime);
530
531  // Reset offset for possibly sliced string.
532  __ Move(edi, Immediate(0));
533  __ mov(eax, Operand(esp, kSubjectOffset));
534  __ JumpIfSmi(eax, &runtime);
535  __ mov(edx, eax);  // Make a copy of the original subject string.
536
537  // eax: subject string
538  // edx: subject string
539  // ecx: RegExp data (FixedArray)
540  // Handle subject string according to its encoding and representation:
541  // (1) Sequential two byte?  If yes, go to (9).
542  // (2) Sequential one byte?  If yes, go to (5).
543  // (3) Sequential or cons?  If not, go to (6).
544  // (4) Cons string.  If the string is flat, replace subject with first string
545  //     and go to (1). Otherwise bail out to runtime.
546  // (5) One byte sequential.  Load regexp code for one byte.
547  // (E) Carry on.
548  /// [...]
549
550  // Deferred code at the end of the stub:
551  // (6) Long external string?  If not, go to (10).
552  // (7) External string.  Make it, offset-wise, look like a sequential string.
553  // (8) Is the external string one byte?  If yes, go to (5).
554  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
555  // (10) Short external string or not a string?  If yes, bail out to runtime.
556  // (11) Sliced or thin string.  Replace subject with parent. Go to (1).
557
558  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
559      external_string /* 7 */, check_underlying /* 1 */,
560      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;
561
562  __ bind(&check_underlying);
563  // (1) Sequential two byte?  If yes, go to (9).
564  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
565  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
566
567  __ and_(ebx, kIsNotStringMask |
568               kStringRepresentationMask |
569               kStringEncodingMask |
570               kShortExternalStringMask);
571  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
572  __ j(zero, &seq_two_byte_string);  // Go to (9).
573
574  // (2) Sequential one byte?  If yes, go to (5).
575  // Any other sequential string must be one byte.
576  __ and_(ebx, Immediate(kIsNotStringMask |
577                         kStringRepresentationMask |
578                         kShortExternalStringMask));
579  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).
580
581  // (3) Sequential or cons?  If not, go to (6).
582  // We check whether the subject string is a cons, since sequential strings
583  // have already been covered.
584  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
585  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
586  STATIC_ASSERT(kThinStringTag > kExternalStringTag);
587  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
588  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
589  __ cmp(ebx, Immediate(kExternalStringTag));
590  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).
591
592  // (4) Cons string.  Check that it's flat.
593  // Replace subject with first string and reload instance type.
594  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
595  __ j(not_equal, &runtime);
596  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
597  __ jmp(&check_underlying);
598
599  // eax: sequential subject string (or look-alike, external string)
600  // edx: original subject string
601  // ecx: RegExp data (FixedArray)
602  // (5) One byte sequential.  Load regexp code for one byte.
603  __ bind(&seq_one_byte_string);
604  // Load previous index and check range before edx is overwritten.  We have
605  // to use edx instead of eax here because it might have been only made to
606  // look like a sequential string when it actually is an external string.
607  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
608  __ JumpIfNotSmi(ebx, &runtime);
609  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
610  __ j(above_equal, &runtime);
611  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
612  __ Move(ecx, Immediate(1));  // Type is one byte.
613
614  // (E) Carry on.  String handling is done.
615  __ bind(&check_code);
616  // edx: irregexp code
617  // Check that the irregexp code has been generated for the actual string
618  // encoding. If it has, the field contains a code object otherwise it contains
619  // a smi (code flushing support).
620  __ JumpIfSmi(edx, &runtime);
621
622  // eax: subject string
623  // ebx: previous index (smi)
624  // edx: code
625  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
626  // All checks done. Now push arguments for native regexp code.
627  Counters* counters = isolate()->counters();
628  __ IncrementCounter(counters->regexp_entry_native(), 1);
629
630  // Isolates: note we add an additional parameter here (isolate pointer).
631  static const int kRegExpExecuteArguments = 9;
632  __ EnterApiExitFrame(kRegExpExecuteArguments);
633
634  // Argument 9: Pass current isolate address.
635  __ mov(Operand(esp, 8 * kPointerSize),
636      Immediate(ExternalReference::isolate_address(isolate())));
637
638  // Argument 8: Indicate that this is a direct call from JavaScript.
639  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
640
641  // Argument 7: Start (high end) of backtracking stack memory area.
642  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
643  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
644  __ mov(Operand(esp, 6 * kPointerSize), esi);
645
646  // Argument 6: Set the number of capture registers to zero to force global
647  // regexps to behave as non-global.  This does not affect non-global regexps.
648  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
649
650  // Argument 5: static offsets vector buffer.
651  __ mov(Operand(esp, 4 * kPointerSize),
652         Immediate(ExternalReference::address_of_static_offsets_vector(
653             isolate())));
654
655  // Argument 2: Previous index.
656  __ SmiUntag(ebx);
657  __ mov(Operand(esp, 1 * kPointerSize), ebx);
658
659  // Argument 1: Original subject string.
660  // The original subject is in the previous stack frame. Therefore we have to
661  // use ebp, which points exactly to one pointer size below the previous esp.
662  // (Because creating a new stack frame pushes the previous ebp onto the stack
663  // and thereby moves up esp by one kPointerSize.)
664  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
665  __ mov(Operand(esp, 0 * kPointerSize), esi);
666
667  // esi: original subject string
668  // eax: underlying subject string
669  // ebx: previous index
670  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
671  // edx: code
672  // Argument 4: End of string data
673  // Argument 3: Start of string data
674  // Prepare start and end index of the input.
675  // Load the length from the original sliced string if that is the case.
676  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
677  __ add(esi, edi);  // Calculate input end wrt offset.
678  __ SmiUntag(edi);
679  __ add(ebx, edi);  // Calculate input start wrt offset.
680
681  // ebx: start index of the input string
682  // esi: end index of the input string
683  Label setup_two_byte, setup_rest;
684  __ test(ecx, ecx);
685  __ j(zero, &setup_two_byte, Label::kNear);
686  __ SmiUntag(esi);
687  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
688  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
689  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
690  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
691  __ jmp(&setup_rest, Label::kNear);
692
693  __ bind(&setup_two_byte);
694  STATIC_ASSERT(kSmiTag == 0);
695  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
696  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
697  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
698  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
699  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
700
701  __ bind(&setup_rest);
702
703  // Locate the code entry and call it.
704  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
705  __ call(edx);
706
707  // Drop arguments and come back to JS mode.
708  __ LeaveApiExitFrame(true);
709
710  // Check the result.
711  Label success;
712  __ cmp(eax, 1);
713  // We expect exactly one result since we force the called regexp to behave
714  // as non-global.
715  __ j(equal, &success);
716  Label failure;
717  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
718  __ j(equal, &failure);
719  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
720  // If not exception it can only be retry. Handle that in the runtime system.
721  __ j(not_equal, &runtime);
722  // Result must now be exception. If there is no pending exception already a
723  // stack overflow (on the backtrack stack) was detected in RegExp code but
724  // haven't created the exception yet. Handle that in the runtime system.
725  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
726  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
727                                      isolate());
728  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
729  __ mov(eax, Operand::StaticVariable(pending_exception));
730  __ cmp(edx, eax);
731  __ j(equal, &runtime);
732
733  // For exception, throw the exception again.
734  __ TailCallRuntime(Runtime::kRegExpExecReThrow);
735
736  __ bind(&failure);
737  // For failure to match, return null.
738  __ mov(eax, factory->null_value());
739  __ ret(4 * kPointerSize);
740
741  // Load RegExp data.
742  __ bind(&success);
743  __ mov(eax, Operand(esp, kJSRegExpOffset));
744  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
745  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
746  // Calculate number of capture registers (number_of_captures + 1) * 2.
747  STATIC_ASSERT(kSmiTag == 0);
748  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
749  __ add(edx, Immediate(2));  // edx was a smi.
750
751  // edx: Number of capture registers
752  // Check that the last match info is a FixedArray.
753  __ mov(ebx, Operand(esp, kLastMatchInfoOffset));
754  __ JumpIfSmi(ebx, &runtime);
755  // Check that the object has fast elements.
756  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
757  __ cmp(eax, factory->fixed_array_map());
758  __ j(not_equal, &runtime);
759  // Check that the last match info has space for the capture registers and the
760  // additional information.
761  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
762  __ SmiUntag(eax);
763  __ sub(eax, Immediate(RegExpMatchInfo::kLastMatchOverhead));
764  __ cmp(edx, eax);
765  __ j(greater, &runtime);
766
767  // ebx: last_match_info (FixedArray)
768  // edx: number of capture registers
769  // Store the capture count.
770  __ SmiTag(edx);  // Number of capture registers to smi.
771  __ mov(FieldOperand(ebx, RegExpMatchInfo::kNumberOfCapturesOffset), edx);
772  __ SmiUntag(edx);  // Number of capture registers back from smi.
773  // Store last subject and last input.
774  __ mov(eax, Operand(esp, kSubjectOffset));
775  __ mov(ecx, eax);
776  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastSubjectOffset), eax);
777  __ RecordWriteField(ebx, RegExpMatchInfo::kLastSubjectOffset, eax, edi,
778                      kDontSaveFPRegs);
779  __ mov(eax, ecx);
780  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastInputOffset), eax);
781  __ RecordWriteField(ebx, RegExpMatchInfo::kLastInputOffset, eax, edi,
782                      kDontSaveFPRegs);
783
784  // Get the static offsets vector filled by the native regexp code.
785  ExternalReference address_of_static_offsets_vector =
786      ExternalReference::address_of_static_offsets_vector(isolate());
787  __ mov(ecx, Immediate(address_of_static_offsets_vector));
788
789  // ebx: last_match_info (FixedArray)
790  // ecx: offsets vector
791  // edx: number of capture registers
792  Label next_capture, done;
793  // Capture register counter starts from number of capture registers and
794  // counts down until wrapping after zero.
795  __ bind(&next_capture);
796  __ sub(edx, Immediate(1));
797  __ j(negative, &done, Label::kNear);
798  // Read the value from the static offsets vector buffer.
799  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
800  __ SmiTag(edi);
801  // Store the smi value in the last match info.
802  __ mov(FieldOperand(ebx, edx, times_pointer_size,
803                      RegExpMatchInfo::kFirstCaptureOffset),
804         edi);
805  __ jmp(&next_capture);
806  __ bind(&done);
807
808  // Return last match info.
809  __ mov(eax, ebx);
810  __ ret(4 * kPointerSize);
811
812  // Do the runtime call to execute the regexp.
813  __ bind(&runtime);
814  __ TailCallRuntime(Runtime::kRegExpExec);
815
816  // Deferred code for string handling.
817  // (6) Long external string?  If not, go to (10).
818  __ bind(&not_seq_nor_cons);
819  // Compare flags are still set from (3).
820  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).
821
822  // (7) External string.  Short external strings have been ruled out.
823  __ bind(&external_string);
824  // Reload instance type.
825  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
826  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
827  if (FLAG_debug_code) {
828    // Assert that we do not have a cons or slice (indirect strings) here.
829    // Sequential strings have already been ruled out.
830    __ test_b(ebx, Immediate(kIsIndirectStringMask));
831    __ Assert(zero, kExternalStringExpectedButNotFound);
832  }
833  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
834  // Move the pointer so that offset-wise, it looks like a sequential string.
835  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
836  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
837  STATIC_ASSERT(kTwoByteStringTag == 0);
838  // (8) Is the external string one byte?  If yes, go to (5).
839  __ test_b(ebx, Immediate(kStringEncodingMask));
840  __ j(not_zero, &seq_one_byte_string);  // Go to (5).
841
842  // eax: sequential subject string (or look-alike, external string)
843  // edx: original subject string
844  // ecx: RegExp data (FixedArray)
845  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
846  __ bind(&seq_two_byte_string);
847  // Load previous index and check range before edx is overwritten.  We have
848  // to use edx instead of eax here because it might have been only made to
849  // look like a sequential string when it actually is an external string.
850  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
851  __ JumpIfNotSmi(ebx, &runtime);
852  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
853  __ j(above_equal, &runtime);
854  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
855  __ Move(ecx, Immediate(0));  // Type is two byte.
856  __ jmp(&check_code);  // Go to (E).
857
858  // (10) Not a string or a short external string?  If yes, bail out to runtime.
859  __ bind(&not_long_external);
860  // Catch non-string subject or short external string.
861  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
862  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
863  __ j(not_zero, &runtime);
864
865  // (11) Sliced or thin string.  Replace subject with parent.  Go to (1).
866  Label thin_string;
867  __ cmp(ebx, Immediate(kThinStringTag));
868  __ j(equal, &thin_string, Label::kNear);
869  // Load offset into edi and replace subject string with parent.
870  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
871  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
872  __ jmp(&check_underlying);  // Go to (1).
873
874  __ bind(&thin_string);
875  __ mov(eax, FieldOperand(eax, ThinString::kActualOffset));
876  __ jmp(&check_underlying);  // Go to (1).
877#endif  // V8_INTERPRETED_REGEXP
878}
879
880
881static int NegativeComparisonResult(Condition cc) {
882  DCHECK(cc != equal);
883  DCHECK((cc == less) || (cc == less_equal)
884      || (cc == greater) || (cc == greater_equal));
885  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
886}
887
888
889static void CheckInputType(MacroAssembler* masm, Register input,
890                           CompareICState::State expected, Label* fail) {
891  Label ok;
892  if (expected == CompareICState::SMI) {
893    __ JumpIfNotSmi(input, fail);
894  } else if (expected == CompareICState::NUMBER) {
895    __ JumpIfSmi(input, &ok);
896    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
897           Immediate(masm->isolate()->factory()->heap_number_map()));
898    __ j(not_equal, fail);
899  }
900  // We could be strict about internalized/non-internalized here, but as long as
901  // hydrogen doesn't care, the stub doesn't have to care either.
902  __ bind(&ok);
903}
904
905
906static void BranchIfNotInternalizedString(MacroAssembler* masm,
907                                          Label* label,
908                                          Register object,
909                                          Register scratch) {
910  __ JumpIfSmi(object, label);
911  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
912  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
913  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
914  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
915  __ j(not_zero, label);
916}
917
918
919void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
920  Label runtime_call, check_unequal_objects;
921  Condition cc = GetCondition();
922
923  Label miss;
924  CheckInputType(masm, edx, left(), &miss);
925  CheckInputType(masm, eax, right(), &miss);
926
927  // Compare two smis.
928  Label non_smi, smi_done;
929  __ mov(ecx, edx);
930  __ or_(ecx, eax);
931  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
932  __ sub(edx, eax);  // Return on the result of the subtraction.
933  __ j(no_overflow, &smi_done, Label::kNear);
934  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
935  __ bind(&smi_done);
936  __ mov(eax, edx);
937  __ ret(0);
938  __ bind(&non_smi);
939
940  // NOTICE! This code is only reached after a smi-fast-case check, so
941  // it is certain that at least one operand isn't a smi.
942
943  // Identical objects can be compared fast, but there are some tricky cases
944  // for NaN and undefined.
945  Label generic_heap_number_comparison;
946  {
947    Label not_identical;
948    __ cmp(eax, edx);
949    __ j(not_equal, &not_identical);
950
951    if (cc != equal) {
952      // Check for undefined.  undefined OP undefined is false even though
953      // undefined == undefined.
954      __ cmp(edx, isolate()->factory()->undefined_value());
955      Label check_for_nan;
956      __ j(not_equal, &check_for_nan, Label::kNear);
957      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
958      __ ret(0);
959      __ bind(&check_for_nan);
960    }
961
962    // Test for NaN. Compare heap numbers in a general way,
963    // to handle NaNs correctly.
964    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
965           Immediate(isolate()->factory()->heap_number_map()));
966    __ j(equal, &generic_heap_number_comparison, Label::kNear);
967    if (cc != equal) {
968      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
969      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
970      // Call runtime on identical JSObjects.  Otherwise return equal.
971      __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
972      __ j(above_equal, &runtime_call, Label::kFar);
973      // Call runtime on identical symbols since we need to throw a TypeError.
974      __ cmpb(ecx, Immediate(SYMBOL_TYPE));
975      __ j(equal, &runtime_call, Label::kFar);
976    }
977    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
978    __ ret(0);
979
980
981    __ bind(&not_identical);
982  }
983
984  // Strict equality can quickly decide whether objects are equal.
985  // Non-strict object equality is slower, so it is handled later in the stub.
986  if (cc == equal && strict()) {
987    Label slow;  // Fallthrough label.
988    Label not_smis;
989    // If we're doing a strict equality comparison, we don't have to do
990    // type conversion, so we generate code to do fast comparison for objects
991    // and oddballs. Non-smi numbers and strings still go through the usual
992    // slow-case code.
993    // If either is a Smi (we know that not both are), then they can only
994    // be equal if the other is a HeapNumber. If so, use the slow case.
995    STATIC_ASSERT(kSmiTag == 0);
996    DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
997    __ mov(ecx, Immediate(kSmiTagMask));
998    __ and_(ecx, eax);
999    __ test(ecx, edx);
1000    __ j(not_zero, &not_smis, Label::kNear);
1001    // One operand is a smi.
1002
1003    // Check whether the non-smi is a heap number.
1004    STATIC_ASSERT(kSmiTagMask == 1);
1005    // ecx still holds eax & kSmiTag, which is either zero or one.
1006    __ sub(ecx, Immediate(0x01));
1007    __ mov(ebx, edx);
1008    __ xor_(ebx, eax);
1009    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
1010    __ xor_(ebx, eax);
1011    // if eax was smi, ebx is now edx, else eax.
1012
1013    // Check if the non-smi operand is a heap number.
1014    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1015           Immediate(isolate()->factory()->heap_number_map()));
1016    // If heap number, handle it in the slow case.
1017    __ j(equal, &slow, Label::kNear);
1018    // Return non-equal (ebx is not zero)
1019    __ mov(eax, ebx);
1020    __ ret(0);
1021
1022    __ bind(&not_smis);
1023    // If either operand is a JSObject or an oddball value, then they are not
1024    // equal since their pointers are different
1025    // There is no test for undetectability in strict equality.
1026
1027    // Get the type of the first operand.
1028    // If the first object is a JS object, we have done pointer comparison.
1029    Label first_non_object;
1030    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1031    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
1032    __ j(below, &first_non_object, Label::kNear);
1033
1034    // Return non-zero (eax is not zero)
1035    Label return_not_equal;
1036    STATIC_ASSERT(kHeapObjectTag != 0);
1037    __ bind(&return_not_equal);
1038    __ ret(0);
1039
1040    __ bind(&first_non_object);
1041    // Check for oddballs: true, false, null, undefined.
1042    __ CmpInstanceType(ecx, ODDBALL_TYPE);
1043    __ j(equal, &return_not_equal);
1044
1045    __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
1046    __ j(above_equal, &return_not_equal);
1047
1048    // Check for oddballs: true, false, null, undefined.
1049    __ CmpInstanceType(ecx, ODDBALL_TYPE);
1050    __ j(equal, &return_not_equal);
1051
1052    // Fall through to the general case.
1053    __ bind(&slow);
1054  }
1055
1056  // Generate the number comparison code.
1057  Label non_number_comparison;
1058  Label unordered;
1059  __ bind(&generic_heap_number_comparison);
1060
1061  FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
1062  __ ucomisd(xmm0, xmm1);
1063  // Don't base result on EFLAGS when a NaN is involved.
1064  __ j(parity_even, &unordered, Label::kNear);
1065
1066  __ mov(eax, 0);  // equal
1067  __ mov(ecx, Immediate(Smi::FromInt(1)));
1068  __ cmov(above, eax, ecx);
1069  __ mov(ecx, Immediate(Smi::FromInt(-1)));
1070  __ cmov(below, eax, ecx);
1071  __ ret(0);
1072
1073  // If one of the numbers was NaN, then the result is always false.
1074  // The cc is never not-equal.
1075  __ bind(&unordered);
1076  DCHECK(cc != not_equal);
1077  if (cc == less || cc == less_equal) {
1078    __ mov(eax, Immediate(Smi::FromInt(1)));
1079  } else {
1080    __ mov(eax, Immediate(Smi::FromInt(-1)));
1081  }
1082  __ ret(0);
1083
1084  // The number comparison code did not provide a valid result.
1085  __ bind(&non_number_comparison);
1086
1087  // Fast negative check for internalized-to-internalized equality.
1088  Label check_for_strings;
1089  if (cc == equal) {
1090    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
1091    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
1092
1093    // We've already checked for object identity, so if both operands
1094    // are internalized they aren't equal. Register eax already holds a
1095    // non-zero value, which indicates not equal, so just return.
1096    __ ret(0);
1097  }
1098
1099  __ bind(&check_for_strings);
1100
1101  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
1102                                           &check_unequal_objects);
1103
1104  // Inline comparison of one-byte strings.
1105  if (cc == equal) {
1106    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
1107  } else {
1108    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
1109                                                    edi);
1110  }
1111#ifdef DEBUG
1112  __ Abort(kUnexpectedFallThroughFromStringComparison);
1113#endif
1114
1115  __ bind(&check_unequal_objects);
1116  if (cc == equal && !strict()) {
1117    // Non-strict equality.  Objects are unequal if
1118    // they are both JSObjects and not undetectable,
1119    // and their pointers are different.
1120    Label return_equal, return_unequal, undetectable;
1121    // At most one is a smi, so we can test for smi by adding the two.
1122    // A smi plus a heap object has the low bit set, a heap object plus
1123    // a heap object has the low bit clear.
1124    STATIC_ASSERT(kSmiTag == 0);
1125    STATIC_ASSERT(kSmiTagMask == 1);
1126    __ lea(ecx, Operand(eax, edx, times_1, 0));
1127    __ test(ecx, Immediate(kSmiTagMask));
1128    __ j(not_zero, &runtime_call);
1129
1130    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
1131    __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
1132
1133    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
1134              Immediate(1 << Map::kIsUndetectable));
1135    __ j(not_zero, &undetectable, Label::kNear);
1136    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1137              Immediate(1 << Map::kIsUndetectable));
1138    __ j(not_zero, &return_unequal, Label::kNear);
1139
1140    __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
1141    __ j(below, &runtime_call, Label::kNear);
1142    __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
1143    __ j(below, &runtime_call, Label::kNear);
1144
1145    __ bind(&return_unequal);
1146    // Return non-equal by returning the non-zero object pointer in eax.
1147    __ ret(0);  // eax, edx were pushed
1148
1149    __ bind(&undetectable);
1150    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1151              Immediate(1 << Map::kIsUndetectable));
1152    __ j(zero, &return_unequal, Label::kNear);
1153
1154    // If both sides are JSReceivers, then the result is false according to
1155    // the HTML specification, which says that only comparisons with null or
1156    // undefined are affected by special casing for document.all.
1157    __ CmpInstanceType(ebx, ODDBALL_TYPE);
1158    __ j(zero, &return_equal, Label::kNear);
1159    __ CmpInstanceType(ecx, ODDBALL_TYPE);
1160    __ j(not_zero, &return_unequal, Label::kNear);
1161
1162    __ bind(&return_equal);
1163    __ Move(eax, Immediate(EQUAL));
1164    __ ret(0);  // eax, edx were pushed
1165  }
1166  __ bind(&runtime_call);
1167
1168  if (cc == equal) {
1169    {
1170      FrameScope scope(masm, StackFrame::INTERNAL);
1171      __ Push(esi);
1172      __ Call(strict() ? isolate()->builtins()->StrictEqual()
1173                       : isolate()->builtins()->Equal(),
1174              RelocInfo::CODE_TARGET);
1175      __ Pop(esi);
1176    }
1177    // Turn true into 0 and false into some non-zero value.
1178    STATIC_ASSERT(EQUAL == 0);
1179    __ sub(eax, Immediate(isolate()->factory()->true_value()));
1180    __ Ret();
1181  } else {
1182    // Push arguments below the return address.
1183    __ pop(ecx);
1184    __ push(edx);
1185    __ push(eax);
1186    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1187    __ push(ecx);
1188    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1189    // tagged as a small integer.
1190    __ TailCallRuntime(Runtime::kCompare);
1191  }
1192
1193  __ bind(&miss);
1194  GenerateMiss(masm);
1195}
1196
1197
1198static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1199  // eax : number of arguments to the construct function
1200  // ebx : feedback vector
1201  // edx : slot in feedback vector (Smi)
1202  // edi : the function to call
1203
1204  {
1205    FrameScope scope(masm, StackFrame::INTERNAL);
1206
1207    // Number-of-arguments register must be smi-tagged to call out.
1208    __ SmiTag(eax);
1209    __ push(eax);
1210    __ push(edi);
1211    __ push(edx);
1212    __ push(ebx);
1213    __ push(esi);
1214
1215    __ CallStub(stub);
1216
1217    __ pop(esi);
1218    __ pop(ebx);
1219    __ pop(edx);
1220    __ pop(edi);
1221    __ pop(eax);
1222    __ SmiUntag(eax);
1223  }
1224}
1225
1226
1227static void GenerateRecordCallTarget(MacroAssembler* masm) {
1228  // Cache the called function in a feedback vector slot.  Cache states
1229  // are uninitialized, monomorphic (indicated by a JSFunction), and
1230  // megamorphic.
1231  // eax : number of arguments to the construct function
1232  // ebx : feedback vector
1233  // edx : slot in feedback vector (Smi)
1234  // edi : the function to call
1235  Isolate* isolate = masm->isolate();
1236  Label initialize, done, miss, megamorphic, not_array_function;
1237
1238  // Load the cache state into ecx.
1239  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1240                           FixedArray::kHeaderSize));
1241
1242  // A monomorphic cache hit or an already megamorphic state: invoke the
1243  // function without changing the state.
1244  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
1245  // at this position in a symbol (see static asserts in feedback-vector.h).
1246  Label check_allocation_site;
1247  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
1248  __ j(equal, &done, Label::kFar);
1249  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
1250  __ j(equal, &done, Label::kFar);
1251  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
1252                 Heap::kWeakCellMapRootIndex);
1253  __ j(not_equal, &check_allocation_site);
1254
1255  // If the weak cell is cleared, we have a new chance to become monomorphic.
1256  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
1257  __ jmp(&megamorphic);
1258
1259  __ bind(&check_allocation_site);
1260  // If we came here, we need to see if we are the array function.
1261  // If we didn't have a matching function, and we didn't find the megamorph
1262  // sentinel, then we have in the slot either some other function or an
1263  // AllocationSite.
1264  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
1265  __ j(not_equal, &miss);
1266
1267  // Make sure the function is the Array() function
1268  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1269  __ cmp(edi, ecx);
1270  __ j(not_equal, &megamorphic);
1271  __ jmp(&done, Label::kFar);
1272
1273  __ bind(&miss);
1274
1275  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
1276  // megamorphic.
1277  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
1278  __ j(equal, &initialize);
1279  // MegamorphicSentinel is an immortal immovable object (undefined) so no
1280  // write-barrier is needed.
1281  __ bind(&megamorphic);
1282  __ mov(
1283      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
1284      Immediate(FeedbackVector::MegamorphicSentinel(isolate)));
1285  __ jmp(&done, Label::kFar);
1286
1287  // An uninitialized cache is patched with the function or sentinel to
1288  // indicate the ElementsKind if function is the Array constructor.
1289  __ bind(&initialize);
1290  // Make sure the function is the Array() function
1291  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1292  __ cmp(edi, ecx);
1293  __ j(not_equal, &not_array_function);
1294
1295  // The target function is the Array constructor,
1296  // Create an AllocationSite if we don't already have it, store it in the
1297  // slot.
1298  CreateAllocationSiteStub create_stub(isolate);
1299  CallStubInRecordCallTarget(masm, &create_stub);
1300  __ jmp(&done);
1301
1302  __ bind(&not_array_function);
1303  CreateWeakCellStub weak_cell_stub(isolate);
1304  CallStubInRecordCallTarget(masm, &weak_cell_stub);
1305
1306  __ bind(&done);
1307  // Increment the call count for all function calls.
1308  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
1309                      FixedArray::kHeaderSize + kPointerSize),
1310         Immediate(Smi::FromInt(1)));
1311}
1312
1313
1314void CallConstructStub::Generate(MacroAssembler* masm) {
1315  // eax : number of arguments
1316  // ebx : feedback vector
1317  // edx : slot in feedback vector (Smi, for RecordCallTarget)
1318  // edi : constructor function
1319
1320  Label non_function;
1321  // Check that function is not a smi.
1322  __ JumpIfSmi(edi, &non_function);
1323  // Check that function is a JSFunction.
1324  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1325  __ j(not_equal, &non_function);
1326
1327  GenerateRecordCallTarget(masm);
1328
1329  Label feedback_register_initialized;
1330  // Put the AllocationSite from the feedback vector into ebx, or undefined.
1331  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
1332                           FixedArray::kHeaderSize));
1333  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
1334  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
1335  __ j(equal, &feedback_register_initialized);
1336  __ mov(ebx, isolate()->factory()->undefined_value());
1337  __ bind(&feedback_register_initialized);
1338
1339  __ AssertUndefinedOrAllocationSite(ebx);
1340
1341  // Pass new target to construct stub.
1342  __ mov(edx, edi);
1343
1344  // Tail call to the function-specific construct stub (still in the caller
1345  // context at this point).
1346  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1347  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
1348  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
1349  __ jmp(ecx);
1350
1351  __ bind(&non_function);
1352  __ mov(edx, edi);
1353  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1354}
1355
1356bool CEntryStub::NeedsImmovableCode() {
1357  return false;
1358}
1359
1360
1361void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1362  CEntryStub::GenerateAheadOfTime(isolate);
1363  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1364  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
1365  // It is important that the store buffer overflow stubs are generated first.
1366  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
1367  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
1368  CreateWeakCellStub::GenerateAheadOfTime(isolate);
1369  BinaryOpICStub::GenerateAheadOfTime(isolate);
1370  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1371  StoreFastElementStub::GenerateAheadOfTime(isolate);
1372}
1373
1374
1375void CodeStub::GenerateFPStubs(Isolate* isolate) {
1376  // Generate if not already in cache.
1377  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
1378}
1379
1380
1381void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1382  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1383  stub.GetCode();
1384}
1385
1386
1387void CEntryStub::Generate(MacroAssembler* masm) {
1388  // eax: number of arguments including receiver
1389  // ebx: pointer to C function  (C callee-saved)
1390  // ebp: frame pointer  (restored after C call)
1391  // esp: stack pointer  (restored after C call)
1392  // esi: current context (C callee-saved)
1393  // edi: JS function of the caller (C callee-saved)
1394  //
1395  // If argv_in_register():
1396  // ecx: pointer to the first argument
1397
1398  ProfileEntryHookStub::MaybeCallEntryHook(masm);
1399
1400  // Reserve space on the stack for the three arguments passed to the call. If
1401  // result size is greater than can be returned in registers, also reserve
1402  // space for the hidden argument for the result location, and space for the
1403  // result itself.
1404  int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();
1405
1406  // Enter the exit frame that transitions from JavaScript to C++.
1407  if (argv_in_register()) {
1408    DCHECK(!save_doubles());
1409    DCHECK(!is_builtin_exit());
1410    __ EnterApiExitFrame(arg_stack_space);
1411
1412    // Move argc and argv into the correct registers.
1413    __ mov(esi, ecx);
1414    __ mov(edi, eax);
1415  } else {
1416    __ EnterExitFrame(
1417        arg_stack_space, save_doubles(),
1418        is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
1419  }
1420
1421  // ebx: pointer to C function  (C callee-saved)
1422  // ebp: frame pointer  (restored after C call)
1423  // esp: stack pointer  (restored after C call)
1424  // edi: number of arguments including receiver  (C callee-saved)
1425  // esi: pointer to the first argument (C callee-saved)
1426
1427  // Result returned in eax, or eax+edx if result size is 2.
1428
1429  // Check stack alignment.
1430  if (FLAG_debug_code) {
1431    __ CheckStackAlignment();
1432  }
1433  // Call C function.
1434  if (result_size() <= 2) {
1435    __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
1436    __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
1437    __ mov(Operand(esp, 2 * kPointerSize),
1438           Immediate(ExternalReference::isolate_address(isolate())));
1439  } else {
1440    DCHECK_EQ(3, result_size());
1441    // Pass a pointer to the result location as the first argument.
1442    __ lea(eax, Operand(esp, 4 * kPointerSize));
1443    __ mov(Operand(esp, 0 * kPointerSize), eax);
1444    __ mov(Operand(esp, 1 * kPointerSize), edi);  // argc.
1445    __ mov(Operand(esp, 2 * kPointerSize), esi);  // argv.
1446    __ mov(Operand(esp, 3 * kPointerSize),
1447           Immediate(ExternalReference::isolate_address(isolate())));
1448  }
1449  __ call(ebx);
1450
1451  if (result_size() > 2) {
1452    DCHECK_EQ(3, result_size());
1453#ifndef _WIN32
1454    // Restore the "hidden" argument on the stack which was popped by caller.
1455    __ sub(esp, Immediate(kPointerSize));
1456#endif
1457    // Read result values stored on stack. Result is stored above the arguments.
1458    __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
1459    __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
1460    __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
1461  }
1462  // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!
1463
1464  // Check result for exception sentinel.
1465  Label exception_returned;
1466  __ cmp(eax, isolate()->factory()->exception());
1467  __ j(equal, &exception_returned);
1468
1469  // Check that there is no pending exception, otherwise we
1470  // should have returned the exception sentinel.
1471  if (FLAG_debug_code) {
1472    __ push(edx);
1473    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
1474    Label okay;
1475    ExternalReference pending_exception_address(
1476        Isolate::kPendingExceptionAddress, isolate());
1477    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
1478    // Cannot use check here as it attempts to generate call into runtime.
1479    __ j(equal, &okay, Label::kNear);
1480    __ int3();
1481    __ bind(&okay);
1482    __ pop(edx);
1483  }
1484
1485  // Exit the JavaScript to C++ exit frame.
1486  __ LeaveExitFrame(save_doubles(), !argv_in_register());
1487  __ ret(0);
1488
1489  // Handling of exception.
1490  __ bind(&exception_returned);
1491
1492  ExternalReference pending_handler_context_address(
1493      Isolate::kPendingHandlerContextAddress, isolate());
1494  ExternalReference pending_handler_code_address(
1495      Isolate::kPendingHandlerCodeAddress, isolate());
1496  ExternalReference pending_handler_offset_address(
1497      Isolate::kPendingHandlerOffsetAddress, isolate());
1498  ExternalReference pending_handler_fp_address(
1499      Isolate::kPendingHandlerFPAddress, isolate());
1500  ExternalReference pending_handler_sp_address(
1501      Isolate::kPendingHandlerSPAddress, isolate());
1502
1503  // Ask the runtime for help to determine the handler. This will set eax to
1504  // contain the current pending exception, don't clobber it.
1505  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1506                                 isolate());
1507  {
1508    FrameScope scope(masm, StackFrame::MANUAL);
1509    __ PrepareCallCFunction(3, eax);
1510    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
1511    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
1512    __ mov(Operand(esp, 2 * kPointerSize),
1513           Immediate(ExternalReference::isolate_address(isolate())));
1514    __ CallCFunction(find_handler, 3);
1515  }
1516
1517  // Retrieve the handler context, SP and FP.
1518  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
1519  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
1520  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
1521
1522  // If the handler is a JS frame, restore the context to the frame. Note that
1523  // the context will be set to (esi == 0) for non-JS frames.
1524  Label skip;
1525  __ test(esi, esi);
1526  __ j(zero, &skip, Label::kNear);
1527  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
1528  __ bind(&skip);
1529
1530  // Compute the handler entry address and jump to it.
1531  __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
1532  __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
1533  __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
1534  __ jmp(edi);
1535}
1536
1537
1538void JSEntryStub::Generate(MacroAssembler* masm) {
1539  Label invoke, handler_entry, exit;
1540  Label not_outermost_js, not_outermost_js_2;
1541
1542  ProfileEntryHookStub::MaybeCallEntryHook(masm);
1543
1544  // Set up frame.
1545  __ push(ebp);
1546  __ mov(ebp, esp);
1547
1548  // Push marker in two places.
1549  StackFrame::Type marker = type();
1550  __ push(Immediate(StackFrame::TypeToMarker(marker)));  // marker
1551  ExternalReference context_address(Isolate::kContextAddress, isolate());
1552  __ push(Operand::StaticVariable(context_address));  // context
1553  // Save callee-saved registers (C calling conventions).
1554  __ push(edi);
1555  __ push(esi);
1556  __ push(ebx);
1557
1558  // Save copies of the top frame descriptor on the stack.
1559  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
1560  __ push(Operand::StaticVariable(c_entry_fp));
1561
1562  // If this is the outermost JS call, set js_entry_sp value.
1563  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1564  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
1565  __ j(not_equal, &not_outermost_js, Label::kNear);
1566  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
1567  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
1568  __ jmp(&invoke, Label::kNear);
1569  __ bind(&not_outermost_js);
1570  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
1571
1572  // Jump to a faked try block that does the invoke, with a faked catch
1573  // block that sets the pending exception.
1574  __ jmp(&invoke);
1575  __ bind(&handler_entry);
1576  handler_offset_ = handler_entry.pos();
1577  // Caught exception: Store result (exception) in the pending exception
1578  // field in the JSEnv and return a failure sentinel.
1579  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1580                                      isolate());
1581  __ mov(Operand::StaticVariable(pending_exception), eax);
1582  __ mov(eax, Immediate(isolate()->factory()->exception()));
1583  __ jmp(&exit);
1584
1585  // Invoke: Link this frame into the handler chain.
1586  __ bind(&invoke);
1587  __ PushStackHandler();
1588
1589  // Fake a receiver (NULL).
1590  __ push(Immediate(0));  // receiver
1591
1592  // Invoke the function by calling through JS entry trampoline builtin and
1593  // pop the faked function when we return. Notice that we cannot store a
1594  // reference to the trampoline code directly in this stub, because the
1595  // builtin stubs may not have been generated yet.
1596  if (type() == StackFrame::ENTRY_CONSTRUCT) {
1597    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1598                                      isolate());
1599    __ mov(edx, Immediate(construct_entry));
1600  } else {
1601    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
1602    __ mov(edx, Immediate(entry));
1603  }
1604  __ mov(edx, Operand(edx, 0));  // deref address
1605  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
1606  __ call(edx);
1607
1608  // Unlink this frame from the handler chain.
1609  __ PopStackHandler();
1610
1611  __ bind(&exit);
1612  // Check if the current stack frame is marked as the outermost JS frame.
1613  __ pop(ebx);
1614  __ cmp(ebx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
1615  __ j(not_equal, &not_outermost_js_2);
1616  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
1617  __ bind(&not_outermost_js_2);
1618
1619  // Restore the top frame descriptor from the stack.
1620  __ pop(Operand::StaticVariable(ExternalReference(
1621      Isolate::kCEntryFPAddress, isolate())));
1622
1623  // Restore callee-saved registers (C calling conventions).
1624  __ pop(ebx);
1625  __ pop(esi);
1626  __ pop(edi);
1627  __ add(esp, Immediate(2 * kPointerSize));  // remove markers
1628
1629  // Restore frame pointer and return.
1630  __ pop(ebp);
1631  __ ret(0);
1632}
1633
1634
1635// -------------------------------------------------------------------------
1636// StringCharCodeAtGenerator
1637
1638void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
1639  // If the receiver is a smi trigger the non-string case.
1640  STATIC_ASSERT(kSmiTag == 0);
1641  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
1642    __ JumpIfSmi(object_, receiver_not_string_);
1643
1644    // Fetch the instance type of the receiver into result register.
1645    __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
1646    __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1647    // If the receiver is not a string trigger the non-string case.
1648    __ test(result_, Immediate(kIsNotStringMask));
1649    __ j(not_zero, receiver_not_string_);
1650  }
1651
1652  // If the index is non-smi trigger the non-smi case.
1653  STATIC_ASSERT(kSmiTag == 0);
1654  __ JumpIfNotSmi(index_, &index_not_smi_);
1655  __ bind(&got_smi_index_);
1656
1657  // Check for index out of range.
1658  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
1659  __ j(above_equal, index_out_of_range_);
1660
1661  __ SmiUntag(index_);
1662
1663  Factory* factory = masm->isolate()->factory();
1664  StringCharLoadGenerator::Generate(
1665      masm, factory, object_, index_, result_, &call_runtime_);
1666
1667  __ SmiTag(result_);
1668  __ bind(&exit_);
1669}
1670
1671
1672void StringCharCodeAtGenerator::GenerateSlow(
1673    MacroAssembler* masm, EmbedMode embed_mode,
1674    const RuntimeCallHelper& call_helper) {
1675  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
1676
1677  // Index is not a smi.
1678  __ bind(&index_not_smi_);
1679  // If index is a heap number, try converting it to an integer.
1680  __ CheckMap(index_,
1681              masm->isolate()->factory()->heap_number_map(),
1682              index_not_number_,
1683              DONT_DO_SMI_CHECK);
1684  call_helper.BeforeCall(masm);
1685  if (embed_mode == PART_OF_IC_HANDLER) {
1686    __ push(LoadWithVectorDescriptor::VectorRegister());
1687    __ push(LoadDescriptor::SlotRegister());
1688  }
1689  __ push(object_);
1690  __ push(index_);  // Consumed by runtime conversion function.
1691  __ CallRuntime(Runtime::kNumberToSmi);
1692  if (!index_.is(eax)) {
1693    // Save the conversion result before the pop instructions below
1694    // have a chance to overwrite it.
1695    __ mov(index_, eax);
1696  }
1697  __ pop(object_);
1698  if (embed_mode == PART_OF_IC_HANDLER) {
1699    __ pop(LoadDescriptor::SlotRegister());
1700    __ pop(LoadWithVectorDescriptor::VectorRegister());
1701  }
1702  // Reload the instance type.
1703  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
1704  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1705  call_helper.AfterCall(masm);
1706  // If index is still not a smi, it must be out of range.
1707  STATIC_ASSERT(kSmiTag == 0);
1708  __ JumpIfNotSmi(index_, index_out_of_range_);
1709  // Otherwise, return to the fast path.
1710  __ jmp(&got_smi_index_);
1711
1712  // Call runtime. We get here when the receiver is a string and the
1713  // index is a number, but the code of getting the actual character
1714  // is too complex (e.g., when the string needs to be flattened).
1715  __ bind(&call_runtime_);
1716  call_helper.BeforeCall(masm);
1717  __ push(object_);
1718  __ SmiTag(index_);
1719  __ push(index_);
1720  __ CallRuntime(Runtime::kStringCharCodeAtRT);
1721  if (!result_.is(eax)) {
1722    __ mov(result_, eax);
1723  }
1724  call_helper.AfterCall(masm);
1725  __ jmp(&exit_);
1726
1727  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
1728}
1729
1730void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
1731                                                   Register left,
1732                                                   Register right,
1733                                                   Register scratch1,
1734                                                   Register scratch2) {
1735  Register length = scratch1;
1736
1737  // Compare lengths.
1738  Label strings_not_equal, check_zero_length;
1739  __ mov(length, FieldOperand(left, String::kLengthOffset));
1740  __ cmp(length, FieldOperand(right, String::kLengthOffset));
1741  __ j(equal, &check_zero_length, Label::kNear);
1742  __ bind(&strings_not_equal);
1743  __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
1744  __ ret(0);
1745
1746  // Check if the length is zero.
1747  Label compare_chars;
1748  __ bind(&check_zero_length);
1749  STATIC_ASSERT(kSmiTag == 0);
1750  __ test(length, length);
1751  __ j(not_zero, &compare_chars, Label::kNear);
1752  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1753  __ ret(0);
1754
1755  // Compare characters.
1756  __ bind(&compare_chars);
1757  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
1758                                  &strings_not_equal, Label::kNear);
1759
1760  // Characters are equal.
1761  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1762  __ ret(0);
1763}
1764
1765
1766void StringHelper::GenerateCompareFlatOneByteStrings(
1767    MacroAssembler* masm, Register left, Register right, Register scratch1,
1768    Register scratch2, Register scratch3) {
1769  Counters* counters = masm->isolate()->counters();
1770  __ IncrementCounter(counters->string_compare_native(), 1);
1771
1772  // Find minimum length.
1773  Label left_shorter;
1774  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
1775  __ mov(scratch3, scratch1);
1776  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
1777
1778  Register length_delta = scratch3;
1779
1780  __ j(less_equal, &left_shorter, Label::kNear);
1781  // Right string is shorter. Change scratch1 to be length of right string.
1782  __ sub(scratch1, length_delta);
1783  __ bind(&left_shorter);
1784
1785  Register min_length = scratch1;
1786
1787  // If either length is zero, just compare lengths.
1788  Label compare_lengths;
1789  __ test(min_length, min_length);
1790  __ j(zero, &compare_lengths, Label::kNear);
1791
1792  // Compare characters.
1793  Label result_not_equal;
1794  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
1795                                  &result_not_equal, Label::kNear);
1796
1797  // Compare lengths -  strings up to min-length are equal.
1798  __ bind(&compare_lengths);
1799  __ test(length_delta, length_delta);
1800  Label length_not_equal;
1801  __ j(not_zero, &length_not_equal, Label::kNear);
1802
1803  // Result is EQUAL.
1804  STATIC_ASSERT(EQUAL == 0);
1805  STATIC_ASSERT(kSmiTag == 0);
1806  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1807  __ ret(0);
1808
1809  Label result_greater;
1810  Label result_less;
1811  __ bind(&length_not_equal);
1812  __ j(greater, &result_greater, Label::kNear);
1813  __ jmp(&result_less, Label::kNear);
1814  __ bind(&result_not_equal);
1815  __ j(above, &result_greater, Label::kNear);
1816  __ bind(&result_less);
1817
1818  // Result is LESS.
1819  __ Move(eax, Immediate(Smi::FromInt(LESS)));
1820  __ ret(0);
1821
1822  // Result is GREATER.
1823  __ bind(&result_greater);
1824  __ Move(eax, Immediate(Smi::FromInt(GREATER)));
1825  __ ret(0);
1826}
1827
1828
1829void StringHelper::GenerateOneByteCharsCompareLoop(
1830    MacroAssembler* masm, Register left, Register right, Register length,
1831    Register scratch, Label* chars_not_equal,
1832    Label::Distance chars_not_equal_near) {
1833  // Change index to run from -length to -1 by adding length to string
1834  // start. This means that loop ends when index reaches zero, which
1835  // doesn't need an additional compare.
1836  __ SmiUntag(length);
1837  __ lea(left,
1838         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
1839  __ lea(right,
1840         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
1841  __ neg(length);
1842  Register index = length;  // index = -length;
1843
1844  // Compare loop.
1845  Label loop;
1846  __ bind(&loop);
1847  __ mov_b(scratch, Operand(left, index, times_1, 0));
1848  __ cmpb(scratch, Operand(right, index, times_1, 0));
1849  __ j(not_equal, chars_not_equal, chars_not_equal_near);
1850  __ inc(index);
1851  __ j(not_zero, &loop);
1852}
1853
1854
1855void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
1856  // ----------- S t a t e -------------
1857  //  -- edx    : left
1858  //  -- eax    : right
1859  //  -- esp[0] : return address
1860  // -----------------------------------
1861
1862  // Load ecx with the allocation site.  We stick an undefined dummy value here
1863  // and replace it with the real allocation site later when we instantiate this
1864  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
1865  __ mov(ecx, isolate()->factory()->undefined_value());
1866
1867  // Make sure that we actually patched the allocation site.
1868  if (FLAG_debug_code) {
1869    __ test(ecx, Immediate(kSmiTagMask));
1870    __ Assert(not_equal, kExpectedAllocationSite);
1871    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
1872           isolate()->factory()->allocation_site_map());
1873    __ Assert(equal, kExpectedAllocationSite);
1874  }
1875
1876  // Tail call into the stub that handles binary operations with allocation
1877  // sites.
1878  BinaryOpWithAllocationSiteStub stub(isolate(), state());
1879  __ TailCallStub(&stub);
1880}
1881
1882
1883void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
1884  DCHECK_EQ(CompareICState::BOOLEAN, state());
1885  Label miss;
1886  Label::Distance const miss_distance =
1887      masm->emit_debug_code() ? Label::kFar : Label::kNear;
1888
1889  __ JumpIfSmi(edx, &miss, miss_distance);
1890  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
1891  __ JumpIfSmi(eax, &miss, miss_distance);
1892  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1893  __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
1894  __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
1895  if (!Token::IsEqualityOp(op())) {
1896    __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
1897    __ AssertSmi(eax);
1898    __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
1899    __ AssertSmi(edx);
1900    __ push(eax);
1901    __ mov(eax, edx);
1902    __ pop(edx);
1903  }
1904  __ sub(eax, edx);
1905  __ Ret();
1906
1907  __ bind(&miss);
1908  GenerateMiss(masm);
1909}
1910
1911
1912void CompareICStub::GenerateSmis(MacroAssembler* masm) {
1913  DCHECK(state() == CompareICState::SMI);
1914  Label miss;
1915  __ mov(ecx, edx);
1916  __ or_(ecx, eax);
1917  __ JumpIfNotSmi(ecx, &miss, Label::kNear);
1918
1919  if (GetCondition() == equal) {
1920    // For equality we do not care about the sign of the result.
1921    __ sub(eax, edx);
1922  } else {
1923    Label done;
1924    __ sub(edx, eax);
1925    __ j(no_overflow, &done, Label::kNear);
1926    // Correct sign of result in case of overflow.
1927    __ not_(edx);
1928    __ bind(&done);
1929    __ mov(eax, edx);
1930  }
1931  __ ret(0);
1932
1933  __ bind(&miss);
1934  GenerateMiss(masm);
1935}
1936
1937
1938void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
1939  DCHECK(state() == CompareICState::NUMBER);
1940
1941  Label generic_stub;
1942  Label unordered, maybe_undefined1, maybe_undefined2;
1943  Label miss;
1944
1945  if (left() == CompareICState::SMI) {
1946    __ JumpIfNotSmi(edx, &miss);
1947  }
1948  if (right() == CompareICState::SMI) {
1949    __ JumpIfNotSmi(eax, &miss);
1950  }
1951
1952  // Load left and right operand.
1953  Label done, left, left_smi, right_smi;
1954  __ JumpIfSmi(eax, &right_smi, Label::kNear);
1955  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1956         isolate()->factory()->heap_number_map());
1957  __ j(not_equal, &maybe_undefined1, Label::kNear);
1958  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
1959  __ jmp(&left, Label::kNear);
1960  __ bind(&right_smi);
1961  __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
1962  __ SmiUntag(ecx);
1963  __ Cvtsi2sd(xmm1, ecx);
1964
1965  __ bind(&left);
1966  __ JumpIfSmi(edx, &left_smi, Label::kNear);
1967  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
1968         isolate()->factory()->heap_number_map());
1969  __ j(not_equal, &maybe_undefined2, Label::kNear);
1970  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
1971  __ jmp(&done);
1972  __ bind(&left_smi);
1973  __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
1974  __ SmiUntag(ecx);
1975  __ Cvtsi2sd(xmm0, ecx);
1976
1977  __ bind(&done);
1978  // Compare operands.
1979  __ ucomisd(xmm0, xmm1);
1980
1981  // Don't base result on EFLAGS when a NaN is involved.
1982  __ j(parity_even, &unordered, Label::kNear);
1983
1984  // Return a result of -1, 0, or 1, based on EFLAGS.
1985  // Performing mov, because xor would destroy the flag register.
1986  __ mov(eax, 0);  // equal
1987  __ mov(ecx, Immediate(Smi::FromInt(1)));
1988  __ cmov(above, eax, ecx);
1989  __ mov(ecx, Immediate(Smi::FromInt(-1)));
1990  __ cmov(below, eax, ecx);
1991  __ ret(0);
1992
1993  __ bind(&unordered);
1994  __ bind(&generic_stub);
1995  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
1996                     CompareICState::GENERIC, CompareICState::GENERIC);
1997  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
1998
1999  __ bind(&maybe_undefined1);
2000  if (Token::IsOrderedRelationalCompareOp(op())) {
2001    __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
2002    __ j(not_equal, &miss);
2003    __ JumpIfSmi(edx, &unordered);
2004    __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
2005    __ j(not_equal, &maybe_undefined2, Label::kNear);
2006    __ jmp(&unordered);
2007  }
2008
2009  __ bind(&maybe_undefined2);
2010  if (Token::IsOrderedRelationalCompareOp(op())) {
2011    __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
2012    __ j(equal, &unordered);
2013  }
2014
2015  __ bind(&miss);
2016  GenerateMiss(masm);
2017}
2018
2019
2020void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2021  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
2022  DCHECK(GetCondition() == equal);
2023
2024  // Registers containing left and right operands respectively.
2025  Register left = edx;
2026  Register right = eax;
2027  Register tmp1 = ecx;
2028  Register tmp2 = ebx;
2029
2030  // Check that both operands are heap objects.
2031  Label miss;
2032  __ mov(tmp1, left);
2033  STATIC_ASSERT(kSmiTag == 0);
2034  __ and_(tmp1, right);
2035  __ JumpIfSmi(tmp1, &miss, Label::kNear);
2036
2037  // Check that both operands are internalized strings.
2038  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2039  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2040  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2041  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2042  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2043  __ or_(tmp1, tmp2);
2044  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2045  __ j(not_zero, &miss, Label::kNear);
2046
2047  // Internalized strings are compared by identity.
2048  Label done;
2049  __ cmp(left, right);
2050  // Make sure eax is non-zero. At this point input operands are
2051  // guaranteed to be non-zero.
2052  DCHECK(right.is(eax));
2053  __ j(not_equal, &done, Label::kNear);
2054  STATIC_ASSERT(EQUAL == 0);
2055  STATIC_ASSERT(kSmiTag == 0);
2056  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2057  __ bind(&done);
2058  __ ret(0);
2059
2060  __ bind(&miss);
2061  GenerateMiss(masm);
2062}
2063
2064
2065void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2066  DCHECK(state() == CompareICState::UNIQUE_NAME);
2067  DCHECK(GetCondition() == equal);
2068
2069  // Registers containing left and right operands respectively.
2070  Register left = edx;
2071  Register right = eax;
2072  Register tmp1 = ecx;
2073  Register tmp2 = ebx;
2074
2075  // Check that both operands are heap objects.
2076  Label miss;
2077  __ mov(tmp1, left);
2078  STATIC_ASSERT(kSmiTag == 0);
2079  __ and_(tmp1, right);
2080  __ JumpIfSmi(tmp1, &miss, Label::kNear);
2081
2082  // Check that both operands are unique names. This leaves the instance
2083  // types loaded in tmp1 and tmp2.
2084  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2085  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2086  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2087  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2088
2089  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
2090  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
2091
2092  // Unique names are compared by identity.
2093  Label done;
2094  __ cmp(left, right);
2095  // Make sure eax is non-zero. At this point input operands are
2096  // guaranteed to be non-zero.
2097  DCHECK(right.is(eax));
2098  __ j(not_equal, &done, Label::kNear);
2099  STATIC_ASSERT(EQUAL == 0);
2100  STATIC_ASSERT(kSmiTag == 0);
2101  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2102  __ bind(&done);
2103  __ ret(0);
2104
2105  __ bind(&miss);
2106  GenerateMiss(masm);
2107}
2108
2109
2110void CompareICStub::GenerateStrings(MacroAssembler* masm) {
2111  DCHECK(state() == CompareICState::STRING);
2112  Label miss;
2113
2114  bool equality = Token::IsEqualityOp(op());
2115
2116  // Registers containing left and right operands respectively.
2117  Register left = edx;
2118  Register right = eax;
2119  Register tmp1 = ecx;
2120  Register tmp2 = ebx;
2121  Register tmp3 = edi;
2122
2123  // Check that both operands are heap objects.
2124  __ mov(tmp1, left);
2125  STATIC_ASSERT(kSmiTag == 0);
2126  __ and_(tmp1, right);
2127  __ JumpIfSmi(tmp1, &miss);
2128
2129  // Check that both operands are strings. This leaves the instance
2130  // types loaded in tmp1 and tmp2.
2131  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2132  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2133  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2134  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2135  __ mov(tmp3, tmp1);
2136  STATIC_ASSERT(kNotStringTag != 0);
2137  __ or_(tmp3, tmp2);
2138  __ test(tmp3, Immediate(kIsNotStringMask));
2139  __ j(not_zero, &miss);
2140
2141  // Fast check for identical strings.
2142  Label not_same;
2143  __ cmp(left, right);
2144  __ j(not_equal, &not_same, Label::kNear);
2145  STATIC_ASSERT(EQUAL == 0);
2146  STATIC_ASSERT(kSmiTag == 0);
2147  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2148  __ ret(0);
2149
2150  // Handle not identical strings.
2151  __ bind(&not_same);
2152
2153  // Check that both strings are internalized. If they are, we're done
2154  // because we already know they are not identical.  But in the case of
2155  // non-equality compare, we still need to determine the order. We
2156  // also know they are both strings.
2157  if (equality) {
2158    Label do_compare;
2159    STATIC_ASSERT(kInternalizedTag == 0);
2160    __ or_(tmp1, tmp2);
2161    __ test(tmp1, Immediate(kIsNotInternalizedMask));
2162    __ j(not_zero, &do_compare, Label::kNear);
2163    // Make sure eax is non-zero. At this point input operands are
2164    // guaranteed to be non-zero.
2165    DCHECK(right.is(eax));
2166    __ ret(0);
2167    __ bind(&do_compare);
2168  }
2169
2170  // Check that both strings are sequential one-byte.
2171  Label runtime;
2172  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
2173
2174  // Compare flat one byte strings. Returns when done.
2175  if (equality) {
2176    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
2177                                                  tmp2);
2178  } else {
2179    StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
2180                                                    tmp2, tmp3);
2181  }
2182
2183  // Handle more complex cases in runtime.
2184  __ bind(&runtime);
2185  if (equality) {
2186    {
2187      FrameScope scope(masm, StackFrame::INTERNAL);
2188      __ Push(left);
2189      __ Push(right);
2190      __ CallRuntime(Runtime::kStringEqual);
2191    }
2192    __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
2193    __ Ret();
2194  } else {
2195    __ pop(tmp1);  // Return address.
2196    __ push(left);
2197    __ push(right);
2198    __ push(tmp1);
2199    __ TailCallRuntime(Runtime::kStringCompare);
2200  }
2201
2202  __ bind(&miss);
2203  GenerateMiss(masm);
2204}
2205
2206
2207void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2208  DCHECK_EQ(CompareICState::RECEIVER, state());
2209  Label miss;
2210  __ mov(ecx, edx);
2211  __ and_(ecx, eax);
2212  __ JumpIfSmi(ecx, &miss, Label::kNear);
2213
2214  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2215  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
2216  __ j(below, &miss, Label::kNear);
2217  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
2218  __ j(below, &miss, Label::kNear);
2219
2220  DCHECK_EQ(equal, GetCondition());
2221  __ sub(eax, edx);
2222  __ ret(0);
2223
2224  __ bind(&miss);
2225  GenerateMiss(masm);
2226}
2227
2228
2229void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
2230  Label miss;
2231  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
2232  __ mov(ecx, edx);
2233  __ and_(ecx, eax);
2234  __ JumpIfSmi(ecx, &miss, Label::kNear);
2235
2236  __ GetWeakValue(edi, cell);
2237  __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
2238  __ j(not_equal, &miss, Label::kNear);
2239  __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
2240  __ j(not_equal, &miss, Label::kNear);
2241
2242  if (Token::IsEqualityOp(op())) {
2243    __ sub(eax, edx);
2244    __ ret(0);
2245  } else {
2246    __ PopReturnAddressTo(ecx);
2247    __ Push(edx);
2248    __ Push(eax);
2249    __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
2250    __ PushReturnAddressFrom(ecx);
2251    __ TailCallRuntime(Runtime::kCompare);
2252  }
2253
2254  __ bind(&miss);
2255  GenerateMiss(masm);
2256}
2257
2258
2259void CompareICStub::GenerateMiss(MacroAssembler* masm) {
2260  {
2261    // Call the runtime system in a fresh internal frame.
2262    FrameScope scope(masm, StackFrame::INTERNAL);
2263    __ push(edx);  // Preserve edx and eax.
2264    __ push(eax);
2265    __ push(edx);  // And also use them as the arguments.
2266    __ push(eax);
2267    __ push(Immediate(Smi::FromInt(op())));
2268    __ CallRuntime(Runtime::kCompareIC_Miss);
2269    // Compute the entry point of the rewritten stub.
2270    __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
2271    __ pop(eax);
2272    __ pop(edx);
2273  }
2274
2275  // Do a tail call to the rewritten stub.
2276  __ jmp(edi);
2277}
2278
2279
2280// Helper function used to check that the dictionary doesn't contain
2281// the property. This function may return false negatives, so miss_label
2282// must always call a backup property check that is complete.
2283// This function is safe to call if the receiver has fast properties.
2284// Name must be a unique name and receiver must be a heap object.
2285void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
2286                                                      Label* miss,
2287                                                      Label* done,
2288                                                      Register properties,
2289                                                      Handle<Name> name,
2290                                                      Register r0) {
2291  DCHECK(name->IsUniqueName());
2292
2293  // If names of slots in range from 1 to kProbes - 1 for the hash value are
2294  // not equal to the name and kProbes-th slot is not used (its name is the
2295  // undefined value), it guarantees the hash table doesn't contain the
2296  // property. It's true even if some slots represent deleted properties
2297  // (their names are the hole value).
2298  for (int i = 0; i < kInlinedProbes; i++) {
2299    // Compute the masked index: (hash + i + i * i) & mask.
2300    Register index = r0;
2301    // Capacity is smi 2^n.
2302    __ mov(index, FieldOperand(properties, kCapacityOffset));
2303    __ dec(index);
2304    __ and_(index,
2305            Immediate(Smi::FromInt(name->Hash() +
2306                                   NameDictionary::GetProbeOffset(i))));
2307
2308    // Scale the index by multiplying by the entry size.
2309    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2310    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
2311    Register entity_name = r0;
2312    // Having undefined at this place means the name is not contained.
2313    STATIC_ASSERT(kSmiTagSize == 1);
2314    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
2315                                kElementsStartOffset - kHeapObjectTag));
2316    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
2317    __ j(equal, done);
2318
2319    // Stop if found the property.
2320    __ cmp(entity_name, Handle<Name>(name));
2321    __ j(equal, miss);
2322
2323    Label good;
2324    // Check for the hole and skip.
2325    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
2326    __ j(equal, &good, Label::kNear);
2327
2328    // Check if the entry name is not a unique name.
2329    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
2330    __ JumpIfNotUniqueNameInstanceType(
2331        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
2332    __ bind(&good);
2333  }
2334
2335  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
2336                                NEGATIVE_LOOKUP);
2337  __ push(Immediate(Handle<Object>(name)));
2338  __ push(Immediate(name->Hash()));
2339  __ CallStub(&stub);
2340  __ test(r0, r0);
2341  __ j(not_zero, miss);
2342  __ jmp(done);
2343}
2344
2345void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
2346  // This stub overrides SometimesSetsUpAFrame() to return false.  That means
2347  // we cannot call anything that could cause a GC from this stub.
2348  // Stack frame on entry:
2349  //  esp[0 * kPointerSize]: return address.
2350  //  esp[1 * kPointerSize]: key's hash.
2351  //  esp[2 * kPointerSize]: key.
2352  // Registers:
2353  //  dictionary_: NameDictionary to probe.
2354  //  result_: used as scratch.
2355  //  index_: will hold an index of entry if lookup is successful.
2356  //          might alias with result_.
2357  // Returns:
2358  //  result_ is zero if lookup failed, non zero otherwise.
2359
2360  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
2361
2362  Register scratch = result();
2363
2364  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
2365  __ dec(scratch);
2366  __ SmiUntag(scratch);
2367  __ push(scratch);
2368
2369  // If names of slots in range from 1 to kProbes - 1 for the hash value are
2370  // not equal to the name and kProbes-th slot is not used (its name is the
2371  // undefined value), it guarantees the hash table doesn't contain the
2372  // property. It's true even if some slots represent deleted properties
2373  // (their names are the null value).
2374  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
2375    // Compute the masked index: (hash + i + i * i) & mask.
2376    __ mov(scratch, Operand(esp, 2 * kPointerSize));
2377    if (i > 0) {
2378      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
2379    }
2380    __ and_(scratch, Operand(esp, 0));
2381
2382    // Scale the index by multiplying by the entry size.
2383    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2384    __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.
2385
2386    // Having undefined at this place means the name is not contained.
2387    STATIC_ASSERT(kSmiTagSize == 1);
2388    __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
2389                            kElementsStartOffset - kHeapObjectTag));
2390    __ cmp(scratch, isolate()->factory()->undefined_value());
2391    __ j(equal, &not_in_dictionary);
2392
2393    // Stop if found the property.
2394    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
2395    __ j(equal, &in_dictionary);
2396
2397    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
2398      // If we hit a key that is not a unique name during negative
2399      // lookup we have to bailout as this key might be equal to the
2400      // key we are looking for.
2401
2402      // Check if the entry name is not a unique name.
2403      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2404      __ JumpIfNotUniqueNameInstanceType(
2405          FieldOperand(scratch, Map::kInstanceTypeOffset),
2406          &maybe_in_dictionary);
2407    }
2408  }
2409
2410  __ bind(&maybe_in_dictionary);
2411  // If we are doing negative lookup then probing failure should be
2412  // treated as a lookup success. For positive lookup probing failure
2413  // should be treated as lookup failure.
2414  if (mode() == POSITIVE_LOOKUP) {
2415    __ mov(result(), Immediate(0));
2416    __ Drop(1);
2417    __ ret(2 * kPointerSize);
2418  }
2419
2420  __ bind(&in_dictionary);
2421  __ mov(result(), Immediate(1));
2422  __ Drop(1);
2423  __ ret(2 * kPointerSize);
2424
2425  __ bind(&not_in_dictionary);
2426  __ mov(result(), Immediate(0));
2427  __ Drop(1);
2428  __ ret(2 * kPointerSize);
2429}
2430
2431
2432void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
2433    Isolate* isolate) {
2434  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
2435  stub.GetCode();
2436  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
2437  stub2.GetCode();
2438}
2439
2440
2441// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
2442// the value has just been written into the object, now this stub makes sure
2443// we keep the GC informed.  The word in the object where the value has been
2444// written is in the address register.
2445void RecordWriteStub::Generate(MacroAssembler* masm) {
2446  Label skip_to_incremental_noncompacting;
2447  Label skip_to_incremental_compacting;
2448
2449  // The first two instructions are generated with labels so as to get the
2450  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
2451  // forth between a compare instructions (a nop in this position) and the
2452  // real branch when we start and stop incremental heap marking.
2453  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
2454  __ jmp(&skip_to_incremental_compacting, Label::kFar);
2455
2456  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2457    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2458                           MacroAssembler::kReturnAtEnd);
2459  } else {
2460    __ ret(0);
2461  }
2462
2463  __ bind(&skip_to_incremental_noncompacting);
2464  GenerateIncremental(masm, INCREMENTAL);
2465
2466  __ bind(&skip_to_incremental_compacting);
2467  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
2468
2469  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
2470  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
2471  masm->set_byte_at(0, kTwoByteNopInstruction);
2472  masm->set_byte_at(2, kFiveByteNopInstruction);
2473}
2474
2475
2476void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
2477  regs_.Save(masm);
2478
2479  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2480    Label dont_need_remembered_set;
2481
2482    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
2483    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
2484                           regs_.scratch0(),
2485                           &dont_need_remembered_set);
2486
2487    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
2488                        &dont_need_remembered_set);
2489
2490    // First notify the incremental marker if necessary, then update the
2491    // remembered set.
2492    CheckNeedsToInformIncrementalMarker(
2493        masm,
2494        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
2495        mode);
2496    InformIncrementalMarker(masm);
2497    regs_.Restore(masm);
2498    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2499                           MacroAssembler::kReturnAtEnd);
2500
2501    __ bind(&dont_need_remembered_set);
2502  }
2503
2504  CheckNeedsToInformIncrementalMarker(
2505      masm,
2506      kReturnOnNoNeedToInformIncrementalMarker,
2507      mode);
2508  InformIncrementalMarker(masm);
2509  regs_.Restore(masm);
2510  __ ret(0);
2511}
2512
2513
2514void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
2515  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
2516  int argument_count = 3;
2517  __ PrepareCallCFunction(argument_count, regs_.scratch0());
2518  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
2519  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
2520  __ mov(Operand(esp, 2 * kPointerSize),
2521         Immediate(ExternalReference::isolate_address(isolate())));
2522
2523  AllowExternalCallThatCantCauseGC scope(masm);
2524  __ CallCFunction(
2525      ExternalReference::incremental_marking_record_write_function(isolate()),
2526      argument_count);
2527
2528  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
2529}
2530
2531
2532void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
2533    MacroAssembler* masm,
2534    OnNoNeedToInformIncrementalMarker on_no_need,
2535    Mode mode) {
2536  Label object_is_black, need_incremental, need_incremental_pop_object;
2537
2538  // Let's look at the color of the object:  If it is not black we don't have
2539  // to inform the incremental marker.
2540  __ JumpIfBlack(regs_.object(),
2541                 regs_.scratch0(),
2542                 regs_.scratch1(),
2543                 &object_is_black,
2544                 Label::kNear);
2545
2546  regs_.Restore(masm);
2547  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2548    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2549                           MacroAssembler::kReturnAtEnd);
2550  } else {
2551    __ ret(0);
2552  }
2553
2554  __ bind(&object_is_black);
2555
2556  // Get the value from the slot.
2557  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
2558
2559  if (mode == INCREMENTAL_COMPACTION) {
2560    Label ensure_not_white;
2561
2562    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
2563                     regs_.scratch1(),  // Scratch.
2564                     MemoryChunk::kEvacuationCandidateMask,
2565                     zero,
2566                     &ensure_not_white,
2567                     Label::kNear);
2568
2569    __ CheckPageFlag(regs_.object(),
2570                     regs_.scratch1(),  // Scratch.
2571                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
2572                     not_zero,
2573                     &ensure_not_white,
2574                     Label::kNear);
2575
2576    __ jmp(&need_incremental);
2577
2578    __ bind(&ensure_not_white);
2579  }
2580
2581  // We need an extra register for this, so we push the object register
2582  // temporarily.
2583  __ push(regs_.object());
2584  __ JumpIfWhite(regs_.scratch0(),  // The value.
2585                 regs_.scratch1(),  // Scratch.
2586                 regs_.object(),    // Scratch.
2587                 &need_incremental_pop_object, Label::kNear);
2588  __ pop(regs_.object());
2589
2590  regs_.Restore(masm);
2591  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2592    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2593                           MacroAssembler::kReturnAtEnd);
2594  } else {
2595    __ ret(0);
2596  }
2597
2598  __ bind(&need_incremental_pop_object);
2599  __ pop(regs_.object());
2600
2601  __ bind(&need_incremental);
2602
2603  // Fall through when we need to inform the incremental marker.
2604}
2605
2606
2607void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
2608  CEntryStub ces(isolate(), 1, kSaveFPRegs);
2609  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
2610  int parameter_count_offset =
2611      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
2612  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
2613  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
2614  __ pop(ecx);
2615  int additional_offset =
2616      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
2617  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
2618  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
2619}
2620
2621void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
2622  if (masm->isolate()->function_entry_hook() != NULL) {
2623    ProfileEntryHookStub stub(masm->isolate());
2624    masm->CallStub(&stub);
2625  }
2626}
2627
2628
2629void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
2630  // Save volatile registers.
2631  const int kNumSavedRegisters = 3;
2632  __ push(eax);
2633  __ push(ecx);
2634  __ push(edx);
2635
2636  // Calculate and push the original stack pointer.
2637  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
2638  __ push(eax);
2639
2640  // Retrieve our return address and use it to calculate the calling
2641  // function's address.
2642  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
2643  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
2644  __ push(eax);
2645
2646  // Call the entry hook.
2647  DCHECK(isolate()->function_entry_hook() != NULL);
2648  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
2649          RelocInfo::RUNTIME_ENTRY);
2650  __ add(esp, Immediate(2 * kPointerSize));
2651
2652  // Restore ecx.
2653  __ pop(edx);
2654  __ pop(ecx);
2655  __ pop(eax);
2656
2657  __ ret(0);
2658}
2659
2660
2661template<class T>
2662static void CreateArrayDispatch(MacroAssembler* masm,
2663                                AllocationSiteOverrideMode mode) {
2664  if (mode == DISABLE_ALLOCATION_SITES) {
2665    T stub(masm->isolate(),
2666           GetInitialFastElementsKind(),
2667           mode);
2668    __ TailCallStub(&stub);
2669  } else if (mode == DONT_OVERRIDE) {
2670    int last_index = GetSequenceIndexFromFastElementsKind(
2671        TERMINAL_FAST_ELEMENTS_KIND);
2672    for (int i = 0; i <= last_index; ++i) {
2673      Label next;
2674      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
2675      __ cmp(edx, kind);
2676      __ j(not_equal, &next);
2677      T stub(masm->isolate(), kind);
2678      __ TailCallStub(&stub);
2679      __ bind(&next);
2680    }
2681
2682    // If we reached this point there is a problem.
2683    __ Abort(kUnexpectedElementsKindInArrayConstructor);
2684  } else {
2685    UNREACHABLE();
2686  }
2687}
2688
2689
2690static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
2691                                           AllocationSiteOverrideMode mode) {
2692  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
2693  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
2694  // eax - number of arguments
2695  // edi - constructor?
2696  // esp[0] - return address
2697  // esp[4] - last argument
2698  Label normal_sequence;
2699  if (mode == DONT_OVERRIDE) {
2700    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2701    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2702    STATIC_ASSERT(FAST_ELEMENTS == 2);
2703    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2704    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
2705    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
2706
2707    // is the low bit set? If so, we are holey and that is good.
2708    __ test_b(edx, Immediate(1));
2709    __ j(not_zero, &normal_sequence);
2710  }
2711
2712  // look at the first argument
2713  __ mov(ecx, Operand(esp, kPointerSize));
2714  __ test(ecx, ecx);
2715  __ j(zero, &normal_sequence);
2716
2717  if (mode == DISABLE_ALLOCATION_SITES) {
2718    ElementsKind initial = GetInitialFastElementsKind();
2719    ElementsKind holey_initial = GetHoleyElementsKind(initial);
2720
2721    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
2722                                                  holey_initial,
2723                                                  DISABLE_ALLOCATION_SITES);
2724    __ TailCallStub(&stub_holey);
2725
2726    __ bind(&normal_sequence);
2727    ArraySingleArgumentConstructorStub stub(masm->isolate(),
2728                                            initial,
2729                                            DISABLE_ALLOCATION_SITES);
2730    __ TailCallStub(&stub);
2731  } else if (mode == DONT_OVERRIDE) {
2732    // We are going to create a holey array, but our kind is non-holey.
2733    // Fix kind and retry.
2734    __ inc(edx);
2735
2736    if (FLAG_debug_code) {
2737      Handle<Map> allocation_site_map =
2738          masm->isolate()->factory()->allocation_site_map();
2739      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
2740      __ Assert(equal, kExpectedAllocationSite);
2741    }
2742
2743    // Save the resulting elements kind in type info. We can't just store r3
2744    // in the AllocationSite::transition_info field because elements kind is
2745    // restricted to a portion of the field...upper bits need to be left alone.
2746    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
2747    __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
2748           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
2749
2750    __ bind(&normal_sequence);
2751    int last_index = GetSequenceIndexFromFastElementsKind(
2752        TERMINAL_FAST_ELEMENTS_KIND);
2753    for (int i = 0; i <= last_index; ++i) {
2754      Label next;
2755      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
2756      __ cmp(edx, kind);
2757      __ j(not_equal, &next);
2758      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
2759      __ TailCallStub(&stub);
2760      __ bind(&next);
2761    }
2762
2763    // If we reached this point there is a problem.
2764    __ Abort(kUnexpectedElementsKindInArrayConstructor);
2765  } else {
2766    UNREACHABLE();
2767  }
2768}
2769
2770
2771template<class T>
2772static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
2773  int to_index = GetSequenceIndexFromFastElementsKind(
2774      TERMINAL_FAST_ELEMENTS_KIND);
2775  for (int i = 0; i <= to_index; ++i) {
2776    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
2777    T stub(isolate, kind);
2778    stub.GetCode();
2779    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
2780      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
2781      stub1.GetCode();
2782    }
2783  }
2784}
2785
2786void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2787  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
2788      isolate);
2789  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
2790      isolate);
2791  ArrayNArgumentsConstructorStub stub(isolate);
2792  stub.GetCode();
2793
2794  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
2795  for (int i = 0; i < 2; i++) {
2796    // For internal arrays we only need a few things
2797    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
2798    stubh1.GetCode();
2799    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
2800    stubh2.GetCode();
2801  }
2802}
2803
2804void ArrayConstructorStub::GenerateDispatchToArrayStub(
2805    MacroAssembler* masm, AllocationSiteOverrideMode mode) {
2806  Label not_zero_case, not_one_case;
2807  __ test(eax, eax);
2808  __ j(not_zero, &not_zero_case);
2809  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
2810
2811  __ bind(&not_zero_case);
2812  __ cmp(eax, 1);
2813  __ j(greater, &not_one_case);
2814  CreateArrayDispatchOneArgument(masm, mode);
2815
2816  __ bind(&not_one_case);
2817  ArrayNArgumentsConstructorStub stub(masm->isolate());
2818  __ TailCallStub(&stub);
2819}
2820
2821void ArrayConstructorStub::Generate(MacroAssembler* masm) {
2822  // ----------- S t a t e -------------
2823  //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
2824  //  -- ebx : AllocationSite or undefined
2825  //  -- edi : constructor
2826  //  -- edx : Original constructor
2827  //  -- esp[0] : return address
2828  //  -- esp[4] : last argument
2829  // -----------------------------------
2830  if (FLAG_debug_code) {
2831    // The array construct code is only set for the global and natives
2832    // builtin Array functions which always have maps.
2833
2834    // Initial map for the builtin Array function should be a map.
2835    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
2836    // Will both indicate a NULL and a Smi.
2837    __ test(ecx, Immediate(kSmiTagMask));
2838    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
2839    __ CmpObjectType(ecx, MAP_TYPE, ecx);
2840    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
2841
2842    // We should either have undefined in ebx or a valid AllocationSite
2843    __ AssertUndefinedOrAllocationSite(ebx);
2844  }
2845
2846  Label subclassing;
2847
2848  // Enter the context of the Array function.
2849  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2850
2851  __ cmp(edx, edi);
2852  __ j(not_equal, &subclassing);
2853
2854  Label no_info;
2855  // If the feedback vector is the undefined value call an array constructor
2856  // that doesn't use AllocationSites.
2857  __ cmp(ebx, isolate()->factory()->undefined_value());
2858  __ j(equal, &no_info);
2859
2860  // Only look at the lower 16 bits of the transition info.
2861  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
2862  __ SmiUntag(edx);
2863  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
2864  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
2865  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
2866
2867  __ bind(&no_info);
2868  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
2869
2870  // Subclassing.
2871  __ bind(&subclassing);
2872  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2873  __ add(eax, Immediate(3));
2874  __ PopReturnAddressTo(ecx);
2875  __ Push(edx);
2876  __ Push(ebx);
2877  __ PushReturnAddressFrom(ecx);
2878  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
2879}
2880
2881
2882void InternalArrayConstructorStub::GenerateCase(
2883    MacroAssembler* masm, ElementsKind kind) {
2884  Label not_zero_case, not_one_case;
2885  Label normal_sequence;
2886
2887  __ test(eax, eax);
2888  __ j(not_zero, &not_zero_case);
2889  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
2890  __ TailCallStub(&stub0);
2891
2892  __ bind(&not_zero_case);
2893  __ cmp(eax, 1);
2894  __ j(greater, &not_one_case);
2895
2896  if (IsFastPackedElementsKind(kind)) {
2897    // We might need to create a holey array
2898    // look at the first argument
2899    __ mov(ecx, Operand(esp, kPointerSize));
2900    __ test(ecx, ecx);
2901    __ j(zero, &normal_sequence);
2902
2903    InternalArraySingleArgumentConstructorStub
2904        stub1_holey(isolate(), GetHoleyElementsKind(kind));
2905    __ TailCallStub(&stub1_holey);
2906  }
2907
2908  __ bind(&normal_sequence);
2909  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
2910  __ TailCallStub(&stub1);
2911
2912  __ bind(&not_one_case);
2913  ArrayNArgumentsConstructorStub stubN(isolate());
2914  __ TailCallStub(&stubN);
2915}
2916
2917
2918void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
2919  // ----------- S t a t e -------------
2920  //  -- eax : argc
2921  //  -- edi : constructor
2922  //  -- esp[0] : return address
2923  //  -- esp[4] : last argument
2924  // -----------------------------------
2925
2926  if (FLAG_debug_code) {
2927    // The array construct code is only set for the global and natives
2928    // builtin Array functions which always have maps.
2929
2930    // Initial map for the builtin Array function should be a map.
2931    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
2932    // Will both indicate a NULL and a Smi.
2933    __ test(ecx, Immediate(kSmiTagMask));
2934    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
2935    __ CmpObjectType(ecx, MAP_TYPE, ecx);
2936    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
2937  }
2938
2939  // Figure out the right elements kind
2940  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
2941
2942  // Load the map's "bit field 2" into |result|. We only need the first byte,
2943  // but the following masking takes care of that anyway.
2944  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
2945  // Retrieve elements_kind from bit field 2.
2946  __ DecodeField<Map::ElementsKindBits>(ecx);
2947
2948  if (FLAG_debug_code) {
2949    Label done;
2950    __ cmp(ecx, Immediate(FAST_ELEMENTS));
2951    __ j(equal, &done);
2952    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
2953    __ Assert(equal,
2954              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2955    __ bind(&done);
2956  }
2957
2958  Label fast_elements_case;
2959  __ cmp(ecx, Immediate(FAST_ELEMENTS));
2960  __ j(equal, &fast_elements_case);
2961  GenerateCase(masm, FAST_HOLEY_ELEMENTS);
2962
2963  __ bind(&fast_elements_case);
2964  GenerateCase(masm, FAST_ELEMENTS);
2965}
2966
2967// Generates an Operand for saving parameters after PrepareCallApiFunction.
2968static Operand ApiParameterOperand(int index) {
2969  return Operand(esp, index * kPointerSize);
2970}
2971
2972
2973// Prepares stack to put arguments (aligns and so on). Reserves
2974// space for return value if needed (assumes the return value is a handle).
2975// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
2976// etc. Saves context (esi). If space was reserved for return value then
2977// stores the pointer to the reserved slot into esi.
2978static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
2979  __ EnterApiExitFrame(argc);
2980  if (__ emit_debug_code()) {
2981    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
2982  }
2983}
2984
2985
2986// Calls an API function.  Allocates HandleScope, extracts returned value
2987// from handle and propagates exceptions.  Clobbers ebx, edi and
2988// caller-save registers.  Restores context.  On return removes
2989// stack_space * kPointerSize (GCed).
2990static void CallApiFunctionAndReturn(MacroAssembler* masm,
2991                                     Register function_address,
2992                                     ExternalReference thunk_ref,
2993                                     Operand thunk_last_arg, int stack_space,
2994                                     Operand* stack_space_operand,
2995                                     Operand return_value_operand,
2996                                     Operand* context_restore_operand) {
2997  Isolate* isolate = masm->isolate();
2998
2999  ExternalReference next_address =
3000      ExternalReference::handle_scope_next_address(isolate);
3001  ExternalReference limit_address =
3002      ExternalReference::handle_scope_limit_address(isolate);
3003  ExternalReference level_address =
3004      ExternalReference::handle_scope_level_address(isolate);
3005
3006  DCHECK(edx.is(function_address));
3007  // Allocate HandleScope in callee-save registers.
3008  __ mov(ebx, Operand::StaticVariable(next_address));
3009  __ mov(edi, Operand::StaticVariable(limit_address));
3010  __ add(Operand::StaticVariable(level_address), Immediate(1));
3011
3012  if (FLAG_log_timer_events) {
3013    FrameScope frame(masm, StackFrame::MANUAL);
3014    __ PushSafepointRegisters();
3015    __ PrepareCallCFunction(1, eax);
3016    __ mov(Operand(esp, 0),
3017           Immediate(ExternalReference::isolate_address(isolate)));
3018    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
3019                     1);
3020    __ PopSafepointRegisters();
3021  }
3022
3023
3024  Label profiler_disabled;
3025  Label end_profiler_check;
3026  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
3027  __ cmpb(Operand(eax, 0), Immediate(0));
3028  __ j(zero, &profiler_disabled);
3029
3030  // Additional parameter is the address of the actual getter function.
3031  __ mov(thunk_last_arg, function_address);
3032  // Call the api function.
3033  __ mov(eax, Immediate(thunk_ref));
3034  __ call(eax);
3035  __ jmp(&end_profiler_check);
3036
3037  __ bind(&profiler_disabled);
3038  // Call the api function.
3039  __ call(function_address);
3040  __ bind(&end_profiler_check);
3041
3042  if (FLAG_log_timer_events) {
3043    FrameScope frame(masm, StackFrame::MANUAL);
3044    __ PushSafepointRegisters();
3045    __ PrepareCallCFunction(1, eax);
3046    __ mov(Operand(esp, 0),
3047           Immediate(ExternalReference::isolate_address(isolate)));
3048    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
3049                     1);
3050    __ PopSafepointRegisters();
3051  }
3052
3053  Label prologue;
3054  // Load the value from ReturnValue
3055  __ mov(eax, return_value_operand);
3056
3057  Label promote_scheduled_exception;
3058  Label delete_allocated_handles;
3059  Label leave_exit_frame;
3060
3061  __ bind(&prologue);
3062  // No more valid handles (the result handle was the last one). Restore
3063  // previous handle scope.
3064  __ mov(Operand::StaticVariable(next_address), ebx);
3065  __ sub(Operand::StaticVariable(level_address), Immediate(1));
3066  __ Assert(above_equal, kInvalidHandleScopeLevel);
3067  __ cmp(edi, Operand::StaticVariable(limit_address));
3068  __ j(not_equal, &delete_allocated_handles);
3069
3070  // Leave the API exit frame.
3071  __ bind(&leave_exit_frame);
3072  bool restore_context = context_restore_operand != NULL;
3073  if (restore_context) {
3074    __ mov(esi, *context_restore_operand);
3075  }
3076  if (stack_space_operand != nullptr) {
3077    __ mov(ebx, *stack_space_operand);
3078  }
3079  __ LeaveApiExitFrame(!restore_context);
3080
3081  // Check if the function scheduled an exception.
3082  ExternalReference scheduled_exception_address =
3083      ExternalReference::scheduled_exception_address(isolate);
3084  __ cmp(Operand::StaticVariable(scheduled_exception_address),
3085         Immediate(isolate->factory()->the_hole_value()));
3086  __ j(not_equal, &promote_scheduled_exception);
3087
3088#if DEBUG
3089  // Check if the function returned a valid JavaScript value.
3090  Label ok;
3091  Register return_value = eax;
3092  Register map = ecx;
3093
3094  __ JumpIfSmi(return_value, &ok, Label::kNear);
3095  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
3096
3097  __ CmpInstanceType(map, LAST_NAME_TYPE);
3098  __ j(below_equal, &ok, Label::kNear);
3099
3100  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
3101  __ j(above_equal, &ok, Label::kNear);
3102
3103  __ cmp(map, isolate->factory()->heap_number_map());
3104  __ j(equal, &ok, Label::kNear);
3105
3106  __ cmp(return_value, isolate->factory()->undefined_value());
3107  __ j(equal, &ok, Label::kNear);
3108
3109  __ cmp(return_value, isolate->factory()->true_value());
3110  __ j(equal, &ok, Label::kNear);
3111
3112  __ cmp(return_value, isolate->factory()->false_value());
3113  __ j(equal, &ok, Label::kNear);
3114
3115  __ cmp(return_value, isolate->factory()->null_value());
3116  __ j(equal, &ok, Label::kNear);
3117
3118  __ Abort(kAPICallReturnedInvalidObject);
3119
3120  __ bind(&ok);
3121#endif
3122
3123  if (stack_space_operand != nullptr) {
3124    DCHECK_EQ(0, stack_space);
3125    __ pop(ecx);
3126    __ add(esp, ebx);
3127    __ jmp(ecx);
3128  } else {
3129    __ ret(stack_space * kPointerSize);
3130  }
3131
3132  // Re-throw by promoting a scheduled exception.
3133  __ bind(&promote_scheduled_exception);
3134  __ TailCallRuntime(Runtime::kPromoteScheduledException);
3135
3136  // HandleScope limit has changed. Delete allocated extensions.
3137  ExternalReference delete_extensions =
3138      ExternalReference::delete_handle_scope_extensions(isolate);
3139  __ bind(&delete_allocated_handles);
3140  __ mov(Operand::StaticVariable(limit_address), edi);
3141  __ mov(edi, eax);
3142  __ mov(Operand(esp, 0),
3143         Immediate(ExternalReference::isolate_address(isolate)));
3144  __ mov(eax, Immediate(delete_extensions));
3145  __ call(eax);
3146  __ mov(eax, edi);
3147  __ jmp(&leave_exit_frame);
3148}
3149
3150void CallApiCallbackStub::Generate(MacroAssembler* masm) {
3151  // ----------- S t a t e -------------
3152  //  -- edi                 : callee
3153  //  -- ebx                 : call_data
3154  //  -- ecx                 : holder
3155  //  -- edx                 : api_function_address
3156  //  -- esi                 : context
3157  //  --
3158  //  -- esp[0]              : return address
3159  //  -- esp[4]              : last argument
3160  //  -- ...
3161  //  -- esp[argc * 4]       : first argument
3162  //  -- esp[(argc + 1) * 4] : receiver
3163  // -----------------------------------
3164
3165  Register callee = edi;
3166  Register call_data = ebx;
3167  Register holder = ecx;
3168  Register api_function_address = edx;
3169  Register context = esi;
3170  Register return_address = eax;
3171
3172  typedef FunctionCallbackArguments FCA;
3173
3174  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
3175  STATIC_ASSERT(FCA::kCalleeIndex == 5);
3176  STATIC_ASSERT(FCA::kDataIndex == 4);
3177  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
3178  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
3179  STATIC_ASSERT(FCA::kIsolateIndex == 1);
3180  STATIC_ASSERT(FCA::kHolderIndex == 0);
3181  STATIC_ASSERT(FCA::kNewTargetIndex == 7);
3182  STATIC_ASSERT(FCA::kArgsLength == 8);
3183
3184  __ pop(return_address);
3185
3186  // new target
3187  __ PushRoot(Heap::kUndefinedValueRootIndex);
3188
3189  // context save.
3190  __ push(context);
3191
3192  // callee
3193  __ push(callee);
3194
3195  // call data
3196  __ push(call_data);
3197
3198  Register scratch = call_data;
3199  if (!call_data_undefined()) {
3200    // return value
3201    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
3202    // return value default
3203    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
3204  } else {
3205    // return value
3206    __ push(scratch);
3207    // return value default
3208    __ push(scratch);
3209  }
3210  // isolate
3211  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
3212  // holder
3213  __ push(holder);
3214
3215  __ mov(scratch, esp);
3216
3217  // push return address
3218  __ push(return_address);
3219
3220  if (!is_lazy()) {
3221    // load context from callee
3222    __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
3223  }
3224
3225  // API function gets reference to the v8::Arguments. If CPU profiler
3226  // is enabled wrapper function will be called and we need to pass
3227  // address of the callback as additional parameter, always allocate
3228  // space for it.
3229  const int kApiArgc = 1 + 1;
3230
3231  // Allocate the v8::Arguments structure in the arguments' space since
3232  // it's not controlled by GC.
3233  const int kApiStackSpace = 3;
3234
3235  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);
3236
3237  // FunctionCallbackInfo::implicit_args_.
3238  __ mov(ApiParameterOperand(2), scratch);
3239  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
3240  // FunctionCallbackInfo::values_.
3241  __ mov(ApiParameterOperand(3), scratch);
3242  // FunctionCallbackInfo::length_.
3243  __ Move(ApiParameterOperand(4), Immediate(argc()));
3244
3245  // v8::InvocationCallback's argument.
3246  __ lea(scratch, ApiParameterOperand(2));
3247  __ mov(ApiParameterOperand(0), scratch);
3248
3249  ExternalReference thunk_ref =
3250      ExternalReference::invoke_function_callback(masm->isolate());
3251
3252  Operand context_restore_operand(ebp,
3253                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
3254  // Stores return the first js argument
3255  int return_value_offset = 0;
3256  if (is_store()) {
3257    return_value_offset = 2 + FCA::kArgsLength;
3258  } else {
3259    return_value_offset = 2 + FCA::kReturnValueOffset;
3260  }
3261  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
3262  int stack_space = 0;
3263  Operand length_operand = ApiParameterOperand(4);
3264  Operand* stack_space_operand = &length_operand;
3265  stack_space = argc() + FCA::kArgsLength + 1;
3266  stack_space_operand = nullptr;
3267  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
3268                           ApiParameterOperand(1), stack_space,
3269                           stack_space_operand, return_value_operand,
3270                           &context_restore_operand);
3271}
3272
3273
3274void CallApiGetterStub::Generate(MacroAssembler* masm) {
3275  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
3276  // name below the exit frame to make GC aware of them.
3277  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
3278  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
3279  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
3280  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
3281  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
3282  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
3283  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
3284  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
3285
3286  Register receiver = ApiGetterDescriptor::ReceiverRegister();
3287  Register holder = ApiGetterDescriptor::HolderRegister();
3288  Register callback = ApiGetterDescriptor::CallbackRegister();
3289  Register scratch = ebx;
3290  DCHECK(!AreAliased(receiver, holder, callback, scratch));
3291
3292  __ pop(scratch);  // Pop return address to extend the frame.
3293  __ push(receiver);
3294  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
3295  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
3296  // ReturnValue default value
3297  __ PushRoot(Heap::kUndefinedValueRootIndex);
3298  __ push(Immediate(ExternalReference::isolate_address(isolate())));
3299  __ push(holder);
3300  __ push(Immediate(Smi::kZero));  // should_throw_on_error -> false
3301  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
3302  __ push(scratch);  // Restore return address.
3303
3304  // v8::PropertyCallbackInfo::args_ array and name handle.
3305  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
3306
3307  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
3308  // space for optional callback address parameter (in case CPU profiler is
3309  // active) in non-GCed stack space.
3310  const int kApiArgc = 3 + 1;
3311
3312  // Load address of v8::PropertyAccessorInfo::args_ array.
3313  __ lea(scratch, Operand(esp, 2 * kPointerSize));
3314
3315  PrepareCallApiFunction(masm, kApiArgc);
3316  // Create v8::PropertyCallbackInfo object on the stack and initialize
3317  // it's args_ field.
3318  Operand info_object = ApiParameterOperand(3);
3319  __ mov(info_object, scratch);
3320
3321  // Name as handle.
3322  __ sub(scratch, Immediate(kPointerSize));
3323  __ mov(ApiParameterOperand(0), scratch);
3324  // Arguments pointer.
3325  __ lea(scratch, info_object);
3326  __ mov(ApiParameterOperand(1), scratch);
3327  // Reserve space for optional callback address parameter.
3328  Operand thunk_last_arg = ApiParameterOperand(2);
3329
3330  ExternalReference thunk_ref =
3331      ExternalReference::invoke_accessor_getter_callback(isolate());
3332
3333  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
3334  Register function_address = edx;
3335  __ mov(function_address,
3336         FieldOperand(scratch, Foreign::kForeignAddressOffset));
3337  // +3 is to skip prolog, return address and name handle.
3338  Operand return_value_operand(
3339      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
3340  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
3341                           kStackUnwindSpace, nullptr, return_value_operand,
3342                           NULL);
3343}
3344
3345#undef __
3346
3347}  // namespace internal
3348}  // namespace v8
3349
3350#endif  // V8_TARGET_ARCH_IA32
3351