JITOpcodes.cpp revision 2bde8e466a4451c7319e3a072d118917957d6554
1/*
2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 *    notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 *    notice, this list of conditions and the following disclaimer in the
12 *    documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27#include "config.h"
28#if ENABLE(JIT)
29#include "JIT.h"
30
31#include "Arguments.h"
32#include "JITInlineMethods.h"
33#include "JITStubCall.h"
34#include "JSArray.h"
35#include "JSCell.h"
36#include "JSFunction.h"
37#include "JSPropertyNameIterator.h"
38#include "LinkBuffer.h"
39
40namespace JSC {
41
42#if USE(JSVALUE64)
43
// Marks the label at (current bytecode offset + targetOffset) as used, so the
// linker keeps a patchable jump target for it.
#define RECORD_JUMP_TARGET(targetOffset) \
   do { m_labels[m_bytecodeOffset + (targetOffset)].used(); } while (false)
46
// Emits the shared machine-code trampolines used by all JIT-compiled code:
// the string-length property-access fast path, the virtual call/construct
// (and lazy-link) thunks, the native call/construct thunks, and a common
// failure path for call linking. The finished code is copied into a fresh
// executable pool (returned via *executablePool) and the entry points are
// recorded on *trampolines.
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the Ustring.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    // Lengths above maxImmediateInt cannot be boxed as an immediate int; fall to slow case.
    Jump string_failureCases3 = branch32(Above, regT0, TrustedImm32(JSImmediate::maxImmediateInt));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    JumpList callLinkFailures;
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    // A null result from the lazy-link stub means compilation failed.
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // A non-negative m_numParametersForCall means code has already been generated.
    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    // Reload the executable: compilation may have updated it.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    // Pop back to the caller's frame, record the throw location, and bounce
    // through the VM throw trampoline so the exception is raised there.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
}
188
// Emits the thunk that calls a host (native) function from JIT code. Each
// supported CPU has its own sequence to satisfy its C calling convention;
// all variants set up the callee frame, invoke the NativeFunction pointer
// stored in the NativeExecutable, then check for a pending exception.
// 'isConstruct' selects the constructor slot instead of the call slot.
// Returns the label of the emitted thunk (within the current assembly).
JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    // Native frames have no CodeBlock.
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    // Record where the exception was thrown from, then hand off to the
    // VM throw trampoline via the JIT stack frame.
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}
304
// In the JSVALUE64 configuration all native calls share the single generic
// native-call thunk, so this overload simply returns the cached thunk pointer;
// the pool and NativeFunction arguments are intentionally unused.
JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
{
    return globalData->jitStubs->ctiNativeCall();
}
309
// op_mov dst(1), src(2): copies one virtual register to another, taking care
// to keep the last-result register cache (m_lastResultBytecodeRegister)
// consistent with what actually lands in regT0.
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        // Constants can be stored directly without touching regT0.
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go though
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}
330
// op_end: returns the final program value (operand 1) from JIT code,
// restoring the return address from the call frame header before ret.
void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
338
// op_jmp target(1): unconditional bytecode-relative jump.
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
    RECORD_JUMP_TARGET(target);
}
345
// op_loop_if_lesseq op1(1), op2(2), target(3): loop back-edge taken when
// op1 <= op2 as immediate integers; non-integer operands take the slow case.
// Emits a timeout check because this is a loop back-edge.
void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        // Constant right-hand side: compare against the immediate directly.
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        int32_t op2imm = getConstantOperandImmediateInt(op2);
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}
365
366void JIT::emit_op_new_object(Instruction* currentInstruction)
367{
368    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
369}
370
// op_check_has_instance baseVal(1): verifies that baseVal is a cell whose
// structure has the ImplementsHasInstance flag; otherwise falls to the slow
// case (which will throw). Run before op_instanceof.
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}
384
// op_instanceof dst(1), value(2), baseVal(3), proto(4): computes
// 'value instanceof baseVal' by walking value's prototype chain looking for
// proto. baseVal's cell-ness is assumed checked by op_check_has_instance.
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that proto are cells.  baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
431
// op_call: delegates to the shared call compiler; consumes one call-link-info slot.
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}
436
// op_call_eval: delegates to the shared call compiler; consumes one call-link-info slot.
void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}
441
// op_call_varargs: varargs calls use a dedicated path (no call-link-info slot).
void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}
446
// op_construct: delegates to the shared call compiler; consumes one call-link-info slot.
void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
451
// op_get_global_var dst(1), index(2): loads a register slot out of the code
// block's global object (register table address baked in at compile time).
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
459
// op_put_global_var index(1), value(2): stores a value into a register slot
// of the code block's global object.
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    storePtr(regT1, Address(regT0, currentInstruction[1].u.operand * sizeof(Register)));
}
467
468void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
469{
470    int skip = currentInstruction[3].u.operand;
471
472    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
473    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
474    ASSERT(skip || !checkTopLevel);
475    if (checkTopLevel && skip--) {
476        Jump activationNotCreated;
477        if (checkTopLevel)
478            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
479        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
480        activationNotCreated.link(this);
481    }
482    while (skip--)
483        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
484
485    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
486    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT0);
487    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
488    emitPutVirtualRegister(currentInstruction[1].u.operand);
489}
490
491void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
492{
493    int skip = currentInstruction[2].u.operand;
494
495    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
496    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
497    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
498    ASSERT(skip || !checkTopLevel);
499    if (checkTopLevel && skip--) {
500        Jump activationNotCreated;
501        if (checkTopLevel)
502            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
503        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
504        activationNotCreated.link(this);
505    }
506    while (skip--)
507        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
508
509    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
510    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT1);
511    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
512}
513
// op_tear_off_activation activation(1), arguments(2): detaches the activation
// (and any materialized arguments object) from the register file on function
// exit. Skips the stub call entirely when neither was ever created.
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    // Call the stub if the activation exists, or if the arguments object does.
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}
527
// op_tear_off_arguments dst(1): detaches a materialized arguments object from
// the register file on function exit; no-op if it was never created.
void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}
538
// op_ret result(1): standard function return - load the result, restore the
// caller's frame pointer and return address, then ret.
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}
558
// op_ret_object_or_this result(1), this(2): constructor-return semantics -
// return the result if it is an Object, otherwise return 'this'.
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}
596
597void JIT::emit_op_new_array(Instruction* currentInstruction)
598{
599    JITStubCall stubCall(this, cti_op_new_array);
600    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
601    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
602    stubCall.call(currentInstruction[1].u.operand);
603}
604
// op_resolve dst(1), ident(2): fully generic identifier resolution via stub.
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
611
// op_to_primitive dst(1), src(2): immediates and strings are already
// primitive and pass through; any other cell takes the slow case.
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    // Cells other than strings need the full toPrimitive conversion.
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);

}
627
// op_strcat dst(1), startReg(2), count(3): concatenates a run of registers
// into a string via the stub.
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
635
// op_resolve_base dst(1), ident(2), isStrictPut(3): resolves the base object
// for an assignment; strict-mode puts use the throwing stub variant.
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
642
// op_ensure_property_exists dst(1), ident(2): stub call that throws if the
// named property does not exist; operand 1 doubles as base and destination.
void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
650
// op_resolve_skip dst(1), ident(2), skip(3): identifier resolution that skips
// the innermost 'skip' scopes, via stub.
void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
658
// op_resolve_global dst(1): fast path for resolving a global property using a
// per-site GlobalResolveInfo cache (structure + storage offset). Falls to the
// slow case when the global object's structure no longer matches the cache.
void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
679
// Slow case for op_resolve_global: calls the stub, which performs the full
// lookup and (when possible) repopulates the GlobalResolveInfo cache. Note
// the index counter is bumped here too, mirroring the fast-path emitter.
void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}
694
// op_not dst(1), src(2): logical negation of a boolean immediate. The first
// xor clears the bool-tag so that any non-boolean payload bits trip the slow
// case; the second xor restores the tag with the value bit flipped.
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(TrustedImm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(TrustedImm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
703
// op_jfalse cond(1), target(2): jump when the condition is falsy. Handles
// the immediate encodings of 0, false, and true inline; anything else
// (doubles, cells, etc.) takes the slow case.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    // Any other immediate integer is non-zero, hence truthy: fall through.
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target);
}
718
719void JIT::emit_op_jeq_null(Instruction* currentInstruction)
720{
721    unsigned src = currentInstruction[1].u.operand;
722    unsigned target = currentInstruction[2].u.operand;
723
724    emitGetVirtualRegister(src, regT0);
725    Jump isImmediate = emitJumpIfNotJSCell(regT0);
726
727    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
728    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
729    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
730    Jump wasNotImmediate = jump();
731
732    // Now handle the immediate cases - undefined & null
733    isImmediate.link(this);
734    andPtr(TrustedImm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
735    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);
736
737    wasNotImmediate.link(this);
738    RECORD_JUMP_TARGET(target);
739};
// Fast path for op_jneq_null: the inverse of emit_op_jeq_null - jump to
// target when the operand is NOT loosely equal to null.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null. Masking off the
    // undefined tag bit maps both encodings onto the null encoding.
    isImmediate.link(this);
    andPtr(TrustedImm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}
761
// op_jneq_ptr: jump to target unless the operand is exactly the given cell
// (pointer identity against a constant baked into the bytecode).
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);

    RECORD_JUMP_TARGET(target);
}
773
// op_jsr: store the return address (patched in later via m_jsrSites) into a
// virtual register, then jump to the subroutine target.
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    // Placeholder pointer; the real return address is patched at link time.
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    // Control flow merges here from elsewhere, so the cached result register
    // can no longer be trusted.
    killLastResultRegister();
    RECORD_JUMP_TARGET(target);
}
784
// op_sret: return from a subroutine by jumping to the address previously
// stored by op_jsr in the given virtual register.
void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}
790
// Fast path for op_eq: integer-only equality compare; non-integer operands
// take the slow case (emitSlow_op_eq).
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32Compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
799
// Fast path for op_bitnot: bitwise-complement an immediate integer; other
// types take the slow case (emitSlow_op_bitnot).
void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    not32(regT0);
    // not32 destroys the tag, so re-tag the 32-bit result as an immediate int.
    emitFastArithIntToImmNoCheck(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
808
// op_resolve_with_base: always goes through the stub; resolves the named
// property and its base object in one call.
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}
816
// op_new_func_exp: create a function object for a function expression via
// the stub; no fast path.
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
823
// Fast path for op_jtrue: mirror image of emit_op_jfalse - jump to target
// for non-zero integers and boolean true; zero falls through; non-boolean,
// non-integer values go to the slow case.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    // Exact encoded zero is falsy - fall through without branching.
    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    // Any other immediate integer is truthy.
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    // Not true and not false: defer to the slow case.
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target);
}
838
839void JIT::emit_op_neq(Instruction* currentInstruction)
840{
841    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
842    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
843    set32Compare32(NotEqual, regT1, regT0, regT0);
844    emitTagAsBoolImmediate(regT0);
845
846    emitPutVirtualRegister(currentInstruction[1].u.operand);
847
848}
849
// Fast path for op_bitxor: integer-only xor; non-integer operands take the
// slow case (emitSlow_op_bitxor).
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    // xor of two tagged ints cancels the tag bits, so the result must be re-tagged.
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
858
// Fast path for op_bitor: integer-only or; non-integer operands take the
// slow case (emitSlow_op_bitor). Unlike bitxor, or-ing two tagged ints
// preserves the tag bits, so no re-tag is needed.
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
866
// op_throw: hand the exception value to the stub, which unwinds to the
// nearest handler. Execution never returns here.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
879
// op_get_pnames: set up the property-name iterator for a for-in loop.
// Produces the iterator in dst, zeroes the loop index i, stores the string
// count into size, and branches to breakTarget when the base is null or
// undefined (nothing to enumerate).
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    // In sloppy mode `this` is already an object, so the type check can be
    // skipped when enumerating `this`.
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    // Initialize i to encoded int 0 and size to the string count.
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    // null/undefined => skip the loop entirely; anything else is coerced to
    // an object and re-enters the object path above.
    and32(TrustedImm32(~JSImmediate::ExtendedTagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSImmediate::FullTagTypeNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
922
923void JIT::emit_op_next_pname(Instruction* currentInstruction)
924{
925    int dst = currentInstruction[1].u.operand;
926    int base = currentInstruction[2].u.operand;
927    int i = currentInstruction[3].u.operand;
928    int size = currentInstruction[4].u.operand;
929    int it = currentInstruction[5].u.operand;
930    int target = currentInstruction[6].u.operand;
931
932    JumpList callHasProperty;
933
934    Label begin(this);
935    load32(intPayloadFor(i), regT0);
936    Jump end = branch32(Equal, regT0, intPayloadFor(size));
937
938    // Grab key @ i
939    loadPtr(addressFor(it), regT1);
940    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
941
942    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
943
944    emitPutVirtualRegister(dst, regT2);
945
946    // Increment i
947    add32(TrustedImm32(1), regT0);
948    store32(regT0, intPayloadFor(i));
949
950    // Verify that i is valid:
951    emitGetVirtualRegister(base, regT0);
952
953    // Test base's structure
954    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
955    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
956
957    // Test base's prototype chain
958    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
959    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
960    addJump(branchTestPtr(Zero, Address(regT3)), target);
961
962    Label checkPrototype(this);
963    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
964    callHasProperty.append(emitJumpIfNotJSCell(regT2));
965    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
966    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
967    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
968    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
969
970    // Continue loop.
971    addJump(jump(), target);
972
973    // Slow case: Ask the object if i is valid.
974    callHasProperty.link(this);
975    emitGetVirtualRegister(dst, regT1);
976    JITStubCall stubCall(this, cti_has_property);
977    stubCall.addArgument(regT0);
978    stubCall.addArgument(regT1);
979    stubCall.call();
980
981    // Test for valid key.
982    addJump(branchTest32(NonZero, regT0), target);
983    jump().linkTo(begin, this);
984
985    // End of loop.
986    end.link(this);
987}
988
// op_push_scope: push an object onto the scope chain via the stub (used by
// `with`); the converted object is written back to the same register.
void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
995
// op_pop_scope: pop the top of the scope chain; entirely handled by the stub.
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
1000
// Shared fast path for op_stricteq / op_nstricteq: on JSVALUE64, two
// non-number immediates (or one cell and one immediate) are strictly equal
// iff their encodings are bit-identical. Numbers and cell/cell pairs go to
// the slow case, where real numeric/string comparison happens.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        set32Compare32(Equal, regT1, regT0, regT0);
    else
        set32Compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
1023
// op_stricteq: thin wrapper over the shared strict-equality compiler.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
1028
// op_nstricteq: thin wrapper over the shared strict-equality compiler.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
1033
// Fast path for op_to_jsnumber: immediate integers and number cells pass
// through unchanged; anything else (non-cell non-int, or a cell that is not
// a NumberType) takes the slow case for real ToNumber conversion.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1049
// op_push_new_scope: push a new scope binding the given identifier to the
// given value (used for catch-style bindings); fully handled by the stub.
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
1057
// op_catch: entry point after an exception unwinds here. The thrower leaves
// the catching call frame in regT0; restore it, then move the pending
// exception out of JSGlobalData into the destination register and clear it.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    // Clear the pending exception so it is not seen as still in flight.
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1067
// op_jmp_scopes: pop the given number of scopes via the stub, then jump to
// the target (used when branching out of `with`/catch scopes).
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
}
1076
// op_switch_imm: integer switch. The stub looks up the destination in the
// jump table (filled in at link time via m_switches) and returns the machine
// code address to jump to.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    // Stub returns the code address of the selected case; jump there.
    jump(regT0);
}
1094
// op_switch_char: single-character string switch; same scheme as
// emit_op_switch_imm but using the character jump table.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    // Stub returns the code address of the selected case; jump there.
    jump(regT0);
}
1112
// op_switch_string: string switch; same scheme as emit_op_switch_imm but
// using the string jump table (no ctiOffsets pre-grow needed here).
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    // Stub returns the code address of the selected case; jump there.
    jump(regT0);
}
1129
// op_throw_reference_error: throw a ReferenceError whose message is a
// constant from the code block; fully handled by the stub.
// NOTE(review): this uses ImmPtr while sibling emitters use TrustedImmPtr
// for constants - confirm whether the untrusted variant is intentional here.
void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.call();
}
1136
// op_debug: debugger hook. With DEBUG_WITH_BREAKPOINT enabled it emits a
// hardware breakpoint; otherwise it calls the debug stub with the hook id
// and source position operands.
void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}
1150
// op_eq_null: materialize (src == null) as a boolean. True for null,
// undefined, and cells whose structure is flagged MasqueradesAsUndefined;
// same tests as emit_op_jeq_null but producing a value instead of a branch.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // Cell case: result is the MasqueradesAsUndefined flag.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    set32Test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate case: fold undefined onto null, then compare against null.
    andPtr(TrustedImm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, TrustedImm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);

}
1175
// op_neq_null: materialize (src != null) as a boolean; exact inverse of
// emit_op_eq_null (Zero/NotEqual conditions instead of NonZero/Equal).
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // Cell case: result is the inverted MasqueradesAsUndefined flag.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    set32Test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate case: fold undefined onto null, then compare against null.
    andPtr(TrustedImm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, TrustedImm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
1199
// op_enter: function prologue - zero-initialize all local variable registers.
void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

}
1210
// op_create_activation: lazily create the activation object; skip the stub
// call if the dst register is already non-null (activation exists).
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}
1220
// op_create_arguments: lazily create the arguments object; skip the stub if
// the dst register is already non-null. The no-params stub is a cheaper
// variant for functions declaring no formal parameters.
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    // Store into both the visible register and its unmodified shadow slot.
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}
1234
// op_init_lazy_reg: null out a register so later lazy-creation opcodes
// (create_activation/create_arguments) can test "not yet created".
void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}
1241
// Fast path for op_convert_this (sloppy mode): `this` passes through
// unchanged when it is a cell not needing conversion; non-cells and
// NeedsThisConversion cells take the slow case.
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
}
1250
// op_convert_this in strict mode: `this` is not coerced to an object. An
// all-zero value is normalized to null; otherwise immediates, non-objects,
// and objects pass through unchanged, and only NeedsThisConversion cells
// take the slow case.
void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    // A zero register means "no this" - substitute null.
    Jump notNull = branchTestPtr(NonZero, regT0);
    move(TrustedImmPtr(JSValue::encode(jsNull())), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump notAnObject = branch8(NotEqual, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
}
1267
// op_get_callee: copy the callee pointer from the call frame header into the
// result register.
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitPutVirtualRegister(result);
}
1274
// op_create_this: allocate the `this` object for a [[Construct]] call via
// the stub, passing the prototype operand.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(currentInstruction[2].u.operand, regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1281
// op_profile_will_call: notify the profiler before a call, but only when a
// profiler is installed (cheap null check on the profiler reference).
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);

}
1293
// op_profile_did_call: notify the profiler after a call, but only when a
// profiler is installed (cheap null check on the profiler reference).
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}
1304
1305
1306// Slow cases
1307
// Slow path for op_convert_this: two slow-case links (not-a-cell, and
// NeedsThisConversion) funnel into one stub call.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1316
// Slow path for strict-mode op_convert_this: single slow-case link
// (NeedsThisConversion cell) handled by the stub.
void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1324
// Slow path for op_to_primitive: delegate the conversion to the stub.
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1333
// Slow path for op_loop_if_lesseq: the number of slow-case links to consume
// depends on whether op2 was a constant int in the fast path (one check) or
// a register (two checks). Either way, call the stub and branch back to the
// hot path when it reports true.
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}
1355
// Slow path for op_put_by_val: all four fast-path bailouts (non-int
// property, non-cell base, non-array base, index out of vector bounds)
// funnel into the generic stub.
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}
1373
// Slow path for op_not: undo the fast path's tag-stripping xor to restore
// the original operand before handing it to the stub.
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(TrustedImm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1382
// Slow path for op_jfalse: evaluate truthiness via the jtrue stub and branch
// on the inverted result.
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}
1391
// Slow path for op_bitnot: delegate to the stub for ToInt32 conversion.
void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1399
// Slow path for op_jtrue: evaluate truthiness via the stub and branch on a
// non-zero result.
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}
1408
// Slow path for op_bitxor: delegate to the stub for ToInt32 conversion.
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1417
// Slow path for op_bitor: delegate to the stub for ToInt32 conversion.
void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1426
// Slow path for op_eq: the stub returns 0/1; tag it as a boolean immediate
// before storing the result.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1437
// Slow path for op_neq: reuse the eq stub and invert the low bit of its
// 0/1 result before tagging as a boolean.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1449
// Slow path for op_stricteq: two slow-case links (cell/cell pair, number
// operand) funnel into the stub.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1459
// Slow path for op_nstricteq: two slow-case links (cell/cell pair, number
// operand) funnel into the stub.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1469
// Slow path for op_check_has_instance: the stub throws the appropriate
// TypeError when baseVal cannot be used with instanceof.
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}
1480
// Slow path for op_instanceof: all four fast-path bailouts (value not a
// cell, proto not a cell, plus two generic checks) funnel into the stub,
// which performs the full [[HasInstance]] algorithm.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}
1498
// Slow path for op_call: shared call slow-case compiler, consuming the next
// CallLinkInfo slot in lockstep with the fast path.
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}
1503
// Slow path for op_call_eval: shared call slow-case compiler.
void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}
1508
// Slow path for op_call_varargs: dedicated varargs slow-case compiler (no
// CallLinkInfo slot is consumed for varargs calls).
void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}
1513
// Slow path for op_construct: shared call slow-case compiler.
void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}
1518
1519void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1520{
1521    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
1522    linkSlowCase(iter);
1523
1524    JITStubCall stubCall(this, cti_op_to_jsnumber);
1525    stubCall.addArgument(regT0);
1526    stubCall.call(currentInstruction[1].u.operand);
1527}
1528
// Fast path for reading arguments.length without materializing the arguments
// object: derive the length directly from the call frame's argument count.
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    // If the arguments register is already populated (the arguments object was
    // created), bail to the slow case, which does a generic property load.
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    // ArgumentCount includes 'this'; subtract one for the user-visible length.
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}
1539
1540void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1541{
1542    linkSlowCase(iter);
1543    unsigned dst = currentInstruction[1].u.operand;
1544    unsigned base = currentInstruction[2].u.operand;
1545    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
1546
1547    emitGetVirtualRegister(base, regT0);
1548    JITStubCall stubCall(this, cti_op_get_by_id_generic);
1549    stubCall.addArgument(regT0);
1550    stubCall.addArgument(TrustedImmPtr(ident));
1551    stubCall.call(dst);
1552}
1553
// Fast path for arguments[i] when the arguments object has not been created:
// load the value straight out of the register file instead.
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    // Slow case 1: the arguments object already exists.
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    // Slow case 2: the index is not an immediate integer.
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    // Slow case 3: index (+1 for 'this') is >= the actual argument count.
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    // Arguments within the first numArgs declared parameters live in-place
    // relative to the call frame; any extra arguments are addressed with an
    // additional rebase by the actual argument count (below).
    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
        loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    // Out-of-line case: same frame-relative base, shifted down by the actual
    // argument count (regT2, scaled to bytes) before indexing with regT1.
    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    subPtr(regT2, regT0);
    loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitPutVirtualRegister(dst, regT0);
}
1585
// Slow path for op_get_argument_by_val: ensure the arguments object exists,
// then perform a generic get_by_val on it.
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    // Slow case 1: the arguments object already exists — skip creation and go
    // straight to the generic lookup.
    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    // Slow cases 2 and 3 (non-integer index / out-of-bounds): create the
    // arguments object first.
    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    // Publish the created object to both registers the bytecode uses for it.
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    // Generic path: arguments[property] via the get_by_val stub.
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}
1610
1611#endif // USE(JSVALUE64)
1612
1613void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
1614{
1615    int skip = currentInstruction[5].u.operand;
1616
1617    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
1618
1619    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1620    ASSERT(skip || !checkTopLevel);
1621    if (checkTopLevel && skip--) {
1622        Jump activationNotCreated;
1623        if (checkTopLevel)
1624            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
1625        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
1626        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
1627        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
1628        activationNotCreated.link(this);
1629    }
1630    while (skip--) {
1631        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
1632        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
1633        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
1634    }
1635    emit_op_resolve_global(currentInstruction, true);
1636}
1637
// Slow path for op_resolve_global_dynamic. Two distinct failure modes:
// a scope-chain node failed its activation check (full dynamic resolve), or
// the chain was walked successfully but the global cache missed.
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    // The fast path recorded one slow case per skipped scope node; landing on
    // any of them requires a full dynamic resolve.
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    // Jump back past the whole op_resolve_global_dynamic bytecode.
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    // Consume this instruction's global-resolve cache slot.
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    // Cache-miss path: resolve via the global-resolve stub, passing the cache
    // index so the stub can service this site's cache.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}
1659
1660void JIT::emit_op_new_regexp(Instruction* currentInstruction)
1661{
1662    JITStubCall stubCall(this, cti_op_new_regexp);
1663    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
1664    stubCall.call(currentInstruction[1].u.operand);
1665}
1666
// Set up the outgoing argument area for a varargs call by copying the current
// frame's incoming arguments, storing the argument count (a naked int32) into
// argCountDst.
void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int registerOffset = currentInstruction[3].u.operand;
    ASSERT(argsOffset <= registerOffset);

    int expectedParams = m_codeBlock->m_numParameters - 1;
    // Don't do inline copying if we aren't guaranteed to have a single stream
    // of arguments
    if (expectedParams) {
        JITStubCall stubCall(this, cti_op_load_varargs);
        stubCall.addArgument(Imm32(argsOffset));
        stubCall.call();
        // Stores a naked int32 in the register file.
        store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
        return;
    }

    // Slow case 1: the args register is already populated (presumably a
    // materialized arguments object) — let the stub handle it.
#if USE(JSVALUE32_64)
    addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
#else
    addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
#endif
    // Load arg count into regT0
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(regT0, intPayloadFor(argCountDst));
    // A count of 1 means only 'this' — nothing to copy.
    Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));

    mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
    addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
    subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
    addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer

    // Bounds check the registerfile
    addPtr(regT2, regT3);
    addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
    // Slow case 2: copying would run past the end of the register file.
    addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));

    // Copy loop, highest index first; on JSVALUE32_64 each Register takes two
    // pointer-sized load/store pairs.
    sub32(TrustedImm32(1), regT0);
    Label loopStart = label();
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
#if USE(JSVALUE32_64)
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
#endif
    branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
    endBranch.link(this);
}
1718
// Slow path for op_load_varargs: fall back to the stub when the inline copy
// bailed out.
void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int expectedParams = m_codeBlock->m_numParameters - 1;
    // The fast path only records slow cases when it attempts the inline copy,
    // i.e. when expectedParams == 0; otherwise there is nothing to link here.
    if (expectedParams)
        return;

    // Two slow cases: args register already populated, and the register-file
    // bounds check.
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();

    // Store the stub's return value (the argument count) as a tagged int32.
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(returnValueRegister, intPayloadFor(argCountDst));
}
1736
1737void JIT::emit_op_new_func(Instruction* currentInstruction)
1738{
1739    Jump lazyJump;
1740    int dst = currentInstruction[1].u.operand;
1741    if (currentInstruction[3].u.operand) {
1742#if USE(JSVALUE32_64)
1743        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
1744#else
1745        lazyJump = branchTestPtr(NonZero, addressFor(dst));
1746#endif
1747    }
1748    JITStubCall stubCall(this, cti_op_new_func);
1749    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
1750    stubCall.call(currentInstruction[1].u.operand);
1751    if (currentInstruction[3].u.operand)
1752        lazyJump.link(this);
1753}
1754
1755} // namespace JSC
1756
1757#endif // ENABLE(JIT)
1758