/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {
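
// In the JSVALUE32_64 representation a JSValue is a 64-bit tag/payload pair.
// Throughout this file values travel in register pairs; by convention (see the
// emitLoad/emitLoad2 helpers) the tag lives in regT1 and the payload in regT0,
// with regT3/regT2 holding the tag/payload of a second operand.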

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

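    // The length is stored as an unsigned 32-bit count, but the result has to
    // be representable as an int32 payload, so lengths above INT_MAX fall back
    // to the slow case.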
    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    JumpList callLinkFailures;
    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
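    // Stash the return address as the exception location, then return through
    // ctiVMThrowTrampoline so the exception is rethrown in the caller's frame.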
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
#endif
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* globalData, NativeFunction func)
{
    Call nativeCall;
    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // Thunk generation complete! Copy the code, and link the call to the host function.
    LinkBuffer patchBuffer(this, executablePool, 0);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    patchBuffer.finalizeCode();

    return patchBuffer.trampolineAt(nativeCallThunk);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells.  baseVal must be a cell; this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here by dropping out of the loop - the prototype chain ended without a match, so the result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->m_registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

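    // Walk up the scope chain 'skip' nodes. If this code block may lazily
    // create an activation, the first hop is guarded: it is only taken once
    // the activation register has actually been filled in.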
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments));
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(TrustedImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

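    // The ASSERT documents the tag layout this fast path relies on: BooleanTag
    // sits immediately below Int32Tag, and Int32Tag + 1 overflows to zero (it
    // is the largest tag), so a single unsigned Below-BooleanTag check rejects
    // every value that is neither boolean nor int32. For both remaining types
    // a zero payload means false.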
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

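    // Per the ASSERT below, NullTag is odd and UndefinedTag == NullTag - 1, so
    // or-ing 1 into the tag folds undefined onto null and a single compare
    // catches both immediates.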
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    set32Compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    set32Compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
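    // Both the string and generic paths produce the op_eq answer in regT0;
    // flip the low bit to turn it into the op_neq result.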
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
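    // The bitwise AND of the two tags is a cheap, conservative filter: a
    // double tag is numerically below LowestTag, so AND-ing it with anything
    // stays below LowestTag, while equal non-double tags AND to themselves,
    // which is at or above CellTag. The fast path is therefore only reached
    // when the tags provably differ and neither operand is a double, in which
    // case comparing the tags alone decides strict (in)equality.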
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, TrustedImm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, TrustedImm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        set32Compare32(Equal, regT0, regT1, regT0);
    else
        set32Compare32(NotEqual, regT0, regT1, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    set32Test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set32Compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    set32Compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    set32Test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set32Compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    set32Compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
1355    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
1356    addJump(branchTestPtr(Zero, Address(regT3)), target);
1357
    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

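// In the JSVALUE32_64 encoding all non-double tags occupy the top of the
// 32-bit tag range, so any tag AboveOrEqual JSValue::LowestTag denotes a
// non-double value. op_to_jsnumber therefore passes int32 and double values
// through unchanged and sends everything else to the slow path.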
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

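// op_catch is the entry point of an exception handler. The throw machinery
// leaves the handler's CallFrame in regT0; the pending exception is moved out
// of JSGlobalData::exception into the handler's register, and the global slot
// is reset to the empty JSValue so the exception is no longer seen as pending.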
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

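// The three switch opcodes below share one pattern: record a SwitchRecord so
// that, at link time, each jump-table slot can be filled with the machine-code
// offset of its bytecode target, then call a stub that selects the destination
// (or defaultOffset) and returns it in regT0 for an indirect jump.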
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

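// Activations and arguments objects are created lazily: op_init_lazy_reg seeds
// their registers with the empty JSValue, so the EmptyValueTag checks here and
// in op_create_arguments skip the stub call once the object already exists.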
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    unsigned protoRegister = currentInstruction[2].u.operand;
    emitLoad(protoRegister, regT1, regT0);
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

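// op_convert_this implements the sloppy-mode |this| conversion. The fast path
// keeps cells whose Structure does not set NeedsThisConversion; non-cells and
// convertible cells (strings, for example) fall through to cti_op_convert_this.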
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));

    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    Jump notNull = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    emitStore(thisRegister, jsNull());
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notAnObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this_strict), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

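// Fast path for arguments.length while the arguments object has not been
// materialized (its register still holds the empty JSValue): the length is
// simply the call frame's ArgumentCount minus one for |this|. Once a real
// arguments object exists, the generic get_by_id stub is used instead.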
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

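// Fast path for arguments[i] while no arguments object exists. regT2 becomes
// the 1-based index (counting |this|). Declared parameters live "in place"
// just below the CallFrameHeader, at callFrameRegister minus
// (CallFrameHeaderSize + numArgs) registers; when the caller passed more
// arguments than were declared, the full argument list sits a further
// ArgumentCount registers lower, which is what the subPtr below computes.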
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    subPtr(regT3, regT1);
    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

#if ENABLE(JIT_USE_SOFT_MODULO)
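// softModulo() computes an integer remainder without a hardware divide
// instruction (reached from op_mod when JIT_USE_SOFT_MODULO is enabled). On
// entry regT0 holds the dividend and regT2 the divisor; the remainder is
// returned in regT0. The operands' signs are stripped (and remembered on the
// stack), power-of-two divisors are handled with a simple mask, and otherwise
// shift-and-subtract restoring division keeps only the remainder before the
// sign is reapplied. For example, 100 % 7: subtract 7<<3 = 56 (100-56 = 44),
// then 7<<2 = 28 (44-28 = 16), then 7<<1 = 14 (16-14 = 2), remainder 2.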
void JIT::softModulo()
{
    push(regT1);
    push(regT3);
    move(regT2, regT3);
    move(regT0, regT2);
    move(TrustedImm32(0), regT1);

    // Check for a negative remainder: remember the sign of each operand.
    Jump positiveRegT3 = branch32(GreaterThanOrEqual, regT3, TrustedImm32(0));
    neg32(regT3);
    xor32(TrustedImm32(1), regT1);
    positiveRegT3.link(this);

    Jump positiveRegT2 = branch32(GreaterThanOrEqual, regT2, TrustedImm32(0));
    neg32(regT2);
    xor32(TrustedImm32(2), regT1);
    positiveRegT2.link(this);

    // Save the condition for a negative remainder.
    push(regT1);

    Jump exitBranch = branch32(LessThan, regT2, regT3);

    // Power of two fast case
    move(regT3, regT0);
    sub32(TrustedImm32(1), regT0);
    Jump powerOfTwo = branchTest32(NonZero, regT0, regT3);
    and32(regT0, regT2);
    powerOfTwo.link(this);

    and32(regT3, regT0);

    Jump exitBranch2 = branchTest32(Zero, regT0);

    countLeadingZeros32(regT2, regT0);
    countLeadingZeros32(regT3, regT1);
    sub32(regT0, regT1);

    Jump useFullTable = branch32(Equal, regT1, TrustedImm32(31));

    neg32(regT1);
    add32(TrustedImm32(31), regT1);

    int elementSizeByShift = -1;
#if CPU(ARM)
    elementSizeByShift = 3;
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif
    relativeTableJump(regT1, elementSizeByShift);

    useFullTable.link(this);
    // Modulo table
    for (int i = 31; i > 0; --i) {
#if CPU(ARM_TRADITIONAL)
        m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
        m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
#elif CPU(ARM_THUMB2)
        ShiftTypeAndAmount shift(SRType_LSL, i);
        m_assembler.sub_S(regT1, regT2, regT3, shift);
        m_assembler.it(ARMv7Assembler::ConditionCS);
        m_assembler.mov(regT2, regT1);
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif
    }

    Jump lower = branch32(Below, regT2, regT3);
    sub32(regT3, regT2);
    lower.link(this);

    exitBranch.link(this);
    exitBranch2.link(this);

    // Check for a negative remainder.
    pop(regT1);
    Jump positiveResult = branch32(Equal, regT1, TrustedImm32(0));
    neg32(regT2);
    positiveResult.link(this);

    move(regT2, regT0);

    pop(regT3);
    pop(regT1);
    ret();
}
#endif // ENABLE(JIT_USE_SOFT_MODULO)

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)