quick_entrypoints_x86.S revision 4adeab196d160f70b4865fb8be048ddd2ac7ab82
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_x86.S"

#include "arch/quick_alloc_entrypoints.S"

// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
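// Note: the CFI_* macros used below emit DWARF call-frame information so that unwinding
// stays correct across the explicit pushes, pops and %esp adjustments in this file.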

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll)
     */
MACRO2(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME, got_reg, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push save all callee-save method.
    pushl RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
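    // Frame layout: 3 callee saves (edi, esi, ebp) = 12 bytes, 12 bytes of padding plus the
    // 4-byte ArtMethod* = 16 bytes, and the 4-byte return address, 32 bytes in total.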
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 3*4 + 16 + 4)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(X86) size not as expected."
#endif
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly)
     */
MACRO2(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME, got_reg, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push refs only callee-save method.
    pushl RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 3*4 + 16 + 4)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(X86) size not as expected."
#endif
END_MACRO

MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME)
    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values
    CFI_ADJUST_CFA_OFFSET(-16)
    POP ebp  // Restore callee saves (ebx is saved/restored by the upcall)
    POP esi
    POP edi
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs)
     */
MACRO2(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME, got_reg, temp_reg)
    PUSH edi  // Save callee saves
    PUSH esi
    PUSH ebp
    PUSH ebx  // Save args
    PUSH edx
    PUSH ecx
    // Create space for FPR args.
    subl MACRO_LITERAL(4 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(4 * 8)
    // Save FPRs.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push refs and args callee-save method.
    pushl RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
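    // Frame layout: 6 GPRs (edi, esi, ebp, ebx, edx, ecx) plus the ArtMethod* = 7*4 bytes,
    // 4 XMM argument registers = 4*8 bytes, and the 4-byte return address, 64 bytes in total.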
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 7*4 + 4*8 + 4)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(X86) size not as expected."
#endif
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs) where the method is passed in EAX.
     */
MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX)
    // Save callee and GPR args, mixed together to agree with core spills bitmap.
    PUSH edi  // Save callee saves
    PUSH esi
    PUSH ebp
    PUSH ebx  // Save args
    PUSH edx
    PUSH ecx

    // Create space for FPR args.
    subl MACRO_LITERAL(32), %esp
    CFI_ADJUST_CFA_OFFSET(32)

    // Save FPRs.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    PUSH eax  // Store the ArtMethod reference at the bottom of the stack.
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
END_MACRO

MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME)
    // Restore FPRs. EAX is still on the stack.
    movsd 4(%esp), %xmm0
    movsd 12(%esp), %xmm1
    movsd 20(%esp), %xmm2
    movsd 28(%esp), %xmm3

    addl MACRO_LITERAL(36), %esp  // Remove FPRs and EAX.
    CFI_ADJUST_CFA_OFFSET(-36)

    POP ecx                       // Restore args except eax
    POP edx
    POP ebx
    POP ebp                       // Restore callee saves
    POP esi
    POP edi
END_MACRO

// Restore registers and jump to routine
// Inputs:  EDI contains pointer to code.
// Notes: Need to pop EAX too (restores Method*)
MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP)
    POP eax  // Restore Method*

    // Restore FPRs.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3

    addl MACRO_LITERAL(32), %esp  // Remove FPRs.
    CFI_ADJUST_CFA_OFFSET(-32)

    POP ecx  // Restore args except eax
    POP edx
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    xchgl 0(%esp),%edi // restore EDI and place code pointer as only value on stack
    ret
END_MACRO

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
MACRO0(DELIVER_PENDING_EXCEPTION)
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save callee saves for throw
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp              // Alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*)
    UNREACHABLE
END_MACRO

MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  ebx, ebx  // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call CALLVAR(cxx_name)                      // cxx_name(Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save all registers as basis for long jump context
    mov %esp, %ecx
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp               // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                   // pass arg1
    call CALLVAR(cxx_name)                     // cxx_name(arg1, Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                                   // alignment padding
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                   // pass arg2
    PUSH eax                                   // pass arg1
    call CALLVAR(cxx_name)                     // cxx_name(arg1, arg2, Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
     * the method_idx.  This wrapper will save arg1-arg3 and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1.
     *
     * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
     * of the target Method* in r0 and method->code_ in r1.
     *
     * If unsuccessful, the helper will return null/null and there will be a pending exception in
     * the thread; we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the return
     * address pointing back to the original caller.
     */
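// On x86 the "64-bit result" described above comes back with the Method* in EAX and the code
// pointer in EDX; the body below saves EDX in EDI and tests EAX to detect the exception case.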
MACRO1(INVOKE_TRAMPOLINE_BODY, cxx_name)
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx
    movl %esp, %edx  // remember SP

    // Outgoing argument set up
    PUSH edx                      // pass SP
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                      // pass arg2
    PUSH eax                      // pass arg1
    call CALLVAR(cxx_name)        // cxx_name(arg1, arg2, Thread*, SP)
    movl %edx, %edi               // save code pointer in EDI
    addl MACRO_LITERAL(20), %esp  // Pop arguments, skip eax
    CFI_ADJUST_CFA_OFFSET(-20)

    // Restore FPRs.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3

    // Remove space for FPR args.
    addl MACRO_LITERAL(4 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(-4 * 8)

    POP ecx  // Restore args except eax
    POP edx
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    // Swap EDI callee save with code pointer.
    xchgl %edi, (%esp)
    testl %eax, %eax              // Branch forward if exception pending.
    jz    1f
    // Tail call to intended method.
    ret
1:
    addl MACRO_LITERAL(4), %esp   // Pop code pointer off stack
    CFI_ADJUST_CFA_OFFSET(-4)
    DELIVER_PENDING_EXCEPTION
END_MACRO
MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    INVOKE_TRAMPOLINE_BODY RAW_VAR(cxx_name)
    END_FUNCTION VAR(c_name)
END_MACRO

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck

    /*
     * Helper for quick invocation stub to set up XMM registers.
     * Increments shorty and arg_array and clobbers temp_char.
     * Branches to finished if it encounters the end of the shorty.
     */
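// For example, if the remaining shorty characters are "DIF", the first expansion loads the
// double into its XMM register; the second expansion skips the int (advancing arg_array by
// one word) and then loads the float.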
MACRO5(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, shorty, arg_array, temp_char, finished)
1: // LOOP
    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
    je VAR(finished)                               //   goto finished
    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
    je 2f                                          //   goto FOUND_DOUBLE
    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
    je 3f                                          //   goto FOUND_FLOAT
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
    //  Handle extra space in arg array taken by a long.
    cmpb MACRO_LITERAL(74), REG_VAR(temp_char)     // if (temp_char != 'J')
    jne 1b                                         //   goto LOOP
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
    jmp 1b                                         // goto LOOP
2:  // FOUND_DOUBLE
    movsd (REG_VAR(arg_array)), REG_VAR(xmm_reg)
    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
    jmp 4f
3:  // FOUND_FLOAT
    movss (REG_VAR(arg_array)), REG_VAR(xmm_reg)
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
4:
END_MACRO

    /*
     * Helper for quick invocation stub to set up GPR registers.
     * Increments shorty and arg_array, and returns the current shorty character in
     * temp_char. Branches to finished if it encounters the end of the shorty.
     */
MACRO4(SKIP_OVER_FLOATS, shorty, arg_array, temp_char, finished)
1: // LOOP:
    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
    je VAR(finished)                               //   goto finished
    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
    je 3f                                          //   goto SKIP_FLOAT
    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
    je 4f                                          //   goto SKIP_DOUBLE
    jmp 5f                                         // goto end
3:  // SKIP_FLOAT
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
    jmp 1b                                         // goto LOOP
4:  // SKIP_DOUBLE
    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
    jmp 1b                                         // goto LOOP
5:
END_MACRO

    /*
     * Quick invocation stub (non-static).
     * On entry:
     *   [sp] = return address
     *   [sp + 4] = method pointer
     *   [sp + 8] = argument array or null for no argument methods
     *   [sp + 12] = size of argument array in bytes
     *   [sp + 16] = (managed) thread pointer
     *   [sp + 20] = JValue* result
     *   [sp + 24] = shorty
     */
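// The stub below materializes the managed x86 calling convention: the ArtMethod* goes in EAX,
// "this" in ECX, the next argument words in EDX and EBX, FP args in XMM0-XMM3, and a copy of
// the full argument array is placed on the stack just above a null method slot.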
DEFINE_FUNCTION art_quick_invoke_stub
    // Save the non-volatiles.
    PUSH ebp                      // save ebp
    PUSH ebx                      // save ebx
    PUSH esi                      // save esi
    PUSH edi                      // save edi
    // Set up argument XMM registers.
    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 8+16(%esp), %edi          // EDI := arg_array + 4 ; ie skip this pointer.
    addl LITERAL(4), %edi
    // Clobbers ESI, EDI, EAX.
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished
    .balign 16
.Lxmm_setup_finished:
    mov %esp, %ebp                // copy value of stack pointer into base pointer
    CFI_DEF_CFA_REGISTER(ebp)
    mov 28(%ebp), %ebx            // get arg array size
    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
    addl LITERAL(36), %ebx
    // align frame size to 16 bytes
    andl LITERAL(0xFFFFFFF0), %ebx
    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
    subl %ebx, %esp               // reserve stack space for argument array

    movl LITERAL(0), (%esp)       // store null for method*

    // Copy arg array into stack.
    movl 28(%ebp), %ecx           // ECX = size of args
    movl 24(%ebp), %esi           // ESI = argument array
    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
    rep movsb                     // while (ecx--) { *edi++ = *esi++ }

    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 24(%ebp), %edi            // EDI := arg_array
    mov 0(%edi), %ecx             // ECX := this pointer
    addl LITERAL(4), %edi         // EDI := arg_array + 4 ; ie skip this pointer.

    // Enumerate the possible cases for loading GPRs.
    // edx (and maybe ebx):
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
    je .LfirstLong
    // Must be an integer value.
    movl (%edi), %edx
    addl LITERAL(4), %edi         // arg_array++

    // Now check ebx
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
    // Must be first word of a long, or an integer. First word of long doesn't
    // go into EBX, but can be loaded there anyway, as it is harmless.
    movl (%edi), %ebx
    jmp .Lgpr_setup_finished
.LfirstLong:
    movl (%edi), %edx
    movl 4(%edi), %ebx
    // Nothing left to load.
.Lgpr_setup_finished:
    mov 20(%ebp), %eax            // move method pointer into eax
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
    mov %ebp, %esp                // restore stack pointer
    CFI_DEF_CFA_REGISTER(esp)
    POP edi                       // pop edi
    POP esi                       // pop esi
    POP ebx                       // pop ebx
    POP ebp                       // pop ebp
    mov 20(%esp), %ecx            // get result pointer
    mov %eax, (%ecx)              // store the result assuming it's a long, int or Object*
    mov %edx, 4(%ecx)             // store the other half of the result
    mov 24(%esp), %edx            // get the shorty
    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
    je .Lreturn_double_quick
    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
    je .Lreturn_float_quick
    ret
.Lreturn_double_quick:
    movsd %xmm0, (%ecx)           // store the floating point result
    ret
.Lreturn_float_quick:
    movss %xmm0, (%ecx)           // store the floating point result
    ret
END_FUNCTION art_quick_invoke_stub

    /*
     * Quick invocation stub (static).
     * On entry:
     *   [sp] = return address
     *   [sp + 4] = method pointer
     *   [sp + 8] = argument array or null for no argument methods
     *   [sp + 12] = size of argument array in bytes
     *   [sp + 16] = (managed) thread pointer
     *   [sp + 20] = JValue* result
     *   [sp + 24] = shorty
     */
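// Same as the non-static stub above, except that there is no "this" pointer, so ECX is
// available for the first argument word and the arg array is consumed from its start.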
DEFINE_FUNCTION art_quick_invoke_static_stub
    // Save the non-volatiles.
    PUSH ebp                      // save ebp
    PUSH ebx                      // save ebx
    PUSH esi                      // save esi
    PUSH edi                      // save edi
    // Set up argument XMM registers.
    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 8+16(%esp), %edi          // EDI := arg_array
    // Clobbers ESI, EDI, EAX.
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished2
    .balign 16
.Lxmm_setup_finished2:
    mov %esp, %ebp                // copy value of stack pointer into base pointer
    CFI_DEF_CFA_REGISTER(ebp)
    mov 28(%ebp), %ebx            // get arg array size
    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
    addl LITERAL(36), %ebx
    // align frame size to 16 bytes
    andl LITERAL(0xFFFFFFF0), %ebx
    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
    subl %ebx, %esp               // reserve stack space for argument array

    movl LITERAL(0), (%esp)       // store null for method*

    // Copy arg array into stack.
    movl 28(%ebp), %ecx           // ECX = size of args
    movl 24(%ebp), %esi           // ESI = argument array
    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
    rep movsb                     // while (ecx--) { *edi++ = *esi++ }

    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 24(%ebp), %edi            // EDI := arg_array

    // Enumerate the possible cases for loading GPRs.
    // ecx (and maybe edx)
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
    je .LfirstLong2
    // Must be an integer value.  Load into ECX.
    movl (%edi), %ecx
    addl LITERAL(4), %edi         // arg_array++

    // Now check edx (and maybe ebx).
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
    je .LSecondLong2
    // Must be an integer.  Load into EDX.
    movl (%edi), %edx
    addl LITERAL(4), %edi         // arg_array++

    // Is there anything for ebx?
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    // Must be first word of a long, or an integer. First word of long doesn't
    // go into EBX, but can be loaded there anyway, as it is harmless.
    movl (%edi), %ebx
    jmp .Lgpr_setup_finished2
.LSecondLong2:
    // EDX:EBX is long.  That is all.
    movl (%edi), %edx
    movl 4(%edi), %ebx
    jmp .Lgpr_setup_finished2
.LfirstLong2:
    // ECX:EDX is a long
    movl (%edi), %ecx
    movl 4(%edi), %edx
    addl LITERAL(8), %edi         // arg_array += 2

    // Anything for EBX?
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    // Must be first word of a long, or an integer. First word of long doesn't
    // go into EBX, but can be loaded there anyway, as it is harmless.
    movl (%edi), %ebx
    jmp .Lgpr_setup_finished2
    // Nothing left to load.
.Lgpr_setup_finished2:
    mov 20(%ebp), %eax            // move method pointer into eax
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
    mov %ebp, %esp                // restore stack pointer
    CFI_DEF_CFA_REGISTER(esp)
    POP edi                       // pop edi
    POP esi                       // pop esi
    POP ebx                       // pop ebx
    POP ebp                       // pop ebp
    mov 20(%esp), %ecx            // get result pointer
    mov %eax, (%ecx)              // store the result assuming it's a long, int or Object*
    mov %edx, 4(%ecx)             // store the other half of the result
    mov 24(%esp), %edx            // get the shorty
    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
    je .Lreturn_double_quick2
    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
    je .Lreturn_float_quick2
    ret
.Lreturn_double_quick2:
    movsd %xmm0, (%ecx)           // store the floating point result
    ret
.Lreturn_float_quick2:
    movss %xmm0, (%ecx)           // store the floating point result
    ret
END_FUNCTION art_quick_invoke_static_stub

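// In the downcall macros below, the explicit padding plus the pushed arguments always add up
// to a multiple of 16 bytes, so the stack stays 16-byte aligned at the native call.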
MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                // push padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call CALLVAR(cxx_name)                      // cxx_name(Thread*)
    addl MACRO_LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME         // restore frame up to return address
    CALL_MACRO(return_macro)                    // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                  // push padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                     // push padding
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                                     // pass arg3
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(FOUR_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ebx                                     // pass arg4
    PUSH edx                                     // pass arg3
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, arg4, Thread*)
    addl MACRO_LITERAL(32), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx       // save ref containing registers for GC
    // Outgoing argument set up
    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ecx  // get referrer
    PUSH eax                                          // push padding
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                          // pass referrer
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, referrer, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME               // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx        // save ref containing registers for GC
    // Outgoing argument set up
    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %edx  // get referrer
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                                          // pass referrer
    PUSH ecx                                          // pass arg2
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, referrer, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME               // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx        // save ref containing registers for GC
    // Outgoing argument set up
    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx  // get referrer
    subl MACRO_LITERAL(12), %esp                      // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ebx                                          // pass referrer
    PUSH edx                                          // pass arg3
    PUSH ecx                                          // pass arg2
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, arg3, referrer,
                                                      //          Thread*)
    addl LITERAL(32), %esp                            // pop arguments
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME               // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO

MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
    testl %eax, %eax               // eax == 0 ?
    jz  1f                         // if eax == 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO

MACRO0(RETURN_IF_EAX_ZERO)
    testl %eax, %eax               // eax == 0 ?
    jnz  1f                        // if eax != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO

MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
    jne 1f                                            // if exception field != 0 goto 1
    ret                                               // return
1:                                                    // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO

// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB)

ONE_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER

TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO

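    /*
     * Fast path object locking: attempt to take (or recursively re-take) the thin lock with a
     * lock cmpxchg on the object's lock word, falling back to artLockObjectFromCode for
     * inflated monitors, contention or count overflow. On entry eax holds the object.
     */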
DEFINE_FUNCTION art_quick_lock_object
    testl %eax, %eax                      // null check object/eax
    jz   .Lslow_lock
.Lretry_lock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx         // test the 2 high bits.
    jne  .Lslow_lock                      // slow path if either of the two high bits is set.
    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
    test %ecx, %ecx
    jnz  .Lalready_thin                   // lock word contains a thin lock
    // unlocked case - edx: original lock word, eax: obj.
    movl %eax, %ecx                       // remember object in case of retry
    movl %edx, %eax                       // eax: lock word zero except for read barrier bits.
    movl %fs:THREAD_ID_OFFSET, %edx       // load thread id.
    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
    jnz  .Llock_cmpxchg_fail              // cmpxchg failed, retry
    ret
.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), eax: obj.
    movl %fs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
    cmpw %cx, %dx                         // do we hold the lock already?
    jne  .Lslow_lock
    movl %edx, %ecx                       // copy the lock word to check count overflow.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count for overflow check.
    test LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // overflowed if either of the upper two bits (28-29) are set.
    jne  .Lslow_lock                      // count overflowed so go slow
    movl %eax, %ecx                       // save obj to use eax for cmpxchg.
    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
    // update lockword, cmpxchg necessary for read barrier bits.
    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
    jnz  .Llock_cmpxchg_fail              // cmpxchg failed, retry
    ret
.Llock_cmpxchg_fail:
    movl  %ecx, %eax                      // restore eax
    jmp  .Lretry_lock
.Lslow_lock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object

DEFINE_FUNCTION art_quick_unlock_object
    testl %eax, %eax                      // null check object/eax
    jz   .Lslow_unlock
.Lretry_unlock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
    movl %fs:THREAD_ID_OFFSET, %edx       // edx := thread id
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx
    jnz  .Lslow_unlock                    // lock word contains a monitor
    cmpw %cx, %dx                         // does the thread id match?
    jne  .Lslow_unlock
    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %edx  // zero the read barrier bits.
    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx
    jae  .Lrecursive_thin_unlock
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %eax, %edx                       // edx: obj
    movl %ecx, %eax                       // eax: old lock word.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // ecx: new lock word zero except original rb bits.
#ifndef USE_READ_BARRIER
    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
#else
    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed, retry
#endif
    ret
.Lrecursive_thin_unlock:  // ecx: original lock word, eax: obj
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %eax, %edx                       // edx: obj
    movl %ecx, %eax                       // eax: old lock word.
    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // ecx: new lock word with decremented count.
#ifndef USE_READ_BARRIER
    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
#else
    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed, retry
#endif
    ret
.Lunlock_cmpxchg_fail:  // edx: obj
    movl %edx, %eax                       // restore eax
    jmp  .Lretry_unlock
.Lslow_unlock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object

DEFINE_FUNCTION art_quick_is_assignable
    PUSH eax                              // alignment padding
    PUSH ecx                              // pass arg2 - obj->klass
    PUSH eax                              // pass arg1 - checked class
    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
    addl LITERAL(12), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_is_assignable

DEFINE_FUNCTION art_quick_check_cast
    PUSH eax                              // alignment padding
    PUSH ecx                              // pass arg2 - obj->klass
    PUSH eax                              // pass arg1 - checked class
    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
    testl %eax, %eax
    jz 1f                                 // jump forward if not assignable
    addl LITERAL(12), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret

    CFI_ADJUST_CFA_OFFSET(12)             // Reset unwind info so following code unwinds.
1:
    POP eax                               // pop arguments
    POP ecx
    addl LITERAL(4), %esp
    CFI_ADJUST_CFA_OFFSET(-4)
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  ebx, ebx  // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                              // alignment padding
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                              // pass arg2
    PUSH eax                              // pass arg1
    call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*)
    UNREACHABLE
END_FUNCTION art_quick_check_cast

// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
MACRO2(POP_REG_NE, reg, exclude_reg)
    .ifc RAW_VAR(reg), RAW_VAR(exclude_reg)
      addl MACRO_LITERAL(4), %esp
      CFI_ADJUST_CFA_OFFSET(-4)
    .else
      POP RAW_VAR(reg)
    .endif
END_MACRO

    /*
     * Macro to insert read barrier, only used in art_quick_aput_obj.
     * obj_reg and dest_reg are registers, offset is a defined literal such as
     * MIRROR_OBJECT_CLASS_OFFSET.
     * pop_eax is a boolean flag, indicating if eax is popped after the call.
     * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
     */
MACRO4(READ_BARRIER, obj_reg, offset, dest_reg, pop_eax)
#ifdef USE_READ_BARRIER
    PUSH eax                        // save registers used in art_quick_aput_obj
    PUSH ebx
    PUSH edx
    PUSH ecx
    // Outgoing argument set up
    pushl MACRO_LITERAL((RAW_VAR(offset)))  // pass offset, double parentheses are necessary
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH RAW_VAR(obj_reg)           // pass obj_reg
    PUSH eax                        // pass ref, just pass eax for now since parameter ref is unused
    call SYMBOL(artReadBarrierSlow) // artReadBarrierSlow(ref, obj_reg, offset)
    // No need to unpoison return value in eax, artReadBarrierSlow() would do the unpoisoning.
    .ifnc RAW_VAR(dest_reg), eax
      movl %eax, REG_VAR(dest_reg)  // save loaded ref in dest_reg
    .endif
    addl MACRO_LITERAL(12), %esp    // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    POP_REG_NE ecx, RAW_VAR(dest_reg) // Restore args except dest_reg
    POP_REG_NE edx, RAW_VAR(dest_reg)
    POP_REG_NE ebx, RAW_VAR(dest_reg)
    .ifc RAW_VAR(pop_eax), true
      POP_REG_NE eax, RAW_VAR(dest_reg)
    .endif
#else
    movl RAW_VAR(offset)(REG_VAR(obj_reg)), REG_VAR(dest_reg)
    UNPOISON_HEAP_REF RAW_VAR(dest_reg)
#endif  // USE_READ_BARRIER
END_MACRO

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * eax = array, ecx = index, edx = value
     */
DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check
    testl %eax, %eax
    jnz SYMBOL(art_quick_aput_obj_with_bound_check)
    jmp SYMBOL(art_quick_throw_null_pointer_exception)
END_FUNCTION art_quick_aput_obj_with_null_and_bound_check

DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
    movl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %ebx
    cmpl %ebx, %ecx
    jb SYMBOL(art_quick_aput_obj)
    mov %ecx, %eax
    mov %ebx, %ecx
    jmp SYMBOL(art_quick_throw_array_bounds)
END_FUNCTION art_quick_aput_obj_with_bound_check

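    /*
     * Store the reference in edx into the array in eax at index ecx. The fast path compares the
     * value's class against the array's component type; the slow path calls
     * artIsAssignableFromCode. After a non-null store the card for the array is dirtied by
     * writing the low byte of the thread's card table base to card_table_base + (array >> 7).
     */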
DEFINE_FUNCTION art_quick_aput_obj
    test %edx, %edx              // store of null
    jz .Ldo_aput_null
    READ_BARRIER eax, MIRROR_OBJECT_CLASS_OFFSET, ebx, true
    READ_BARRIER ebx, MIRROR_CLASS_COMPONENT_TYPE_OFFSET, ebx, true
    // value's type == array's component type - trivial assignability
#if defined(USE_READ_BARRIER)
    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, false
    cmpl %eax, %ebx
    POP eax                      // restore eax from the push in the beginning of READ_BARRIER macro
    // This asymmetric push/pop saves a push of eax and maintains stack alignment.
#elif defined(USE_HEAP_POISONING)
    PUSH eax                     // save eax
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
    UNPOISON_HEAP_REF eax
    cmpl %eax, %ebx
    POP eax                      // restore eax
#else
    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx
#endif
    jne .Lcheck_assignability
.Ldo_aput:
    POISON_HEAP_REF edx
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
    shrl LITERAL(7), %eax
    movb %dl, (%edx, %eax)
    ret
.Ldo_aput_null:
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
    ret
.Lcheck_assignability:
    PUSH eax                      // save arguments
    PUSH ecx
    PUSH edx
#if defined(USE_READ_BARRIER)
    subl LITERAL(4), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, true
    subl LITERAL(4), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                      // pass arg2 - type of the value to be stored
#elif defined(USE_HEAP_POISONING)
    subl LITERAL(8), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
    UNPOISON_HEAP_REF eax
    PUSH eax                      // pass arg2 - type of the value to be stored
#else
    subl LITERAL(8), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl MIRROR_OBJECT_CLASS_OFFSET(%edx)  // pass arg2 - type of the value to be stored
    CFI_ADJUST_CFA_OFFSET(4)
#endif
    PUSH ebx                      // pass arg1 - component type of the array
    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    testl %eax, %eax
    jz   .Lthrow_array_store_exception
    POP  edx
    POP  ecx
    POP  eax
    POISON_HEAP_REF edx
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)  // do the aput
    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
    shrl LITERAL(7), %eax
    movb %dl, (%edx, %eax)
    ret
    CFI_ADJUST_CFA_OFFSET(12)     // 3 POP after the jz for unwinding.
.Lthrow_array_store_exception:
    POP  edx
    POP  ecx
    POP  eax
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                      // alignment padding
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                      // pass arg2 - value
    PUSH eax                      // pass arg1 - array
    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
    UNREACHABLE
END_FUNCTION art_quick_aput_obj

DEFINE_FUNCTION art_quick_memcpy
    SETUP_GOT_NOSAVE ebx          // clobbers EBX
    PUSH edx                      // pass arg3
    PUSH ecx                      // pass arg2
    PUSH eax                      // pass arg1
    call PLT_SYMBOL(memcpy)       // (void*, const void*, size_t)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_memcpy

NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret

DEFINE_FUNCTION art_quick_d2l
    subl LITERAL(12), %esp        // alignment padding, room for argument
    CFI_ADJUST_CFA_OFFSET(12)
    movsd %xmm0, 0(%esp)          // arg a
    call SYMBOL(art_d2l)          // (jdouble a)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_d2l

DEFINE_FUNCTION art_quick_f2l
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    movss %xmm0, 0(%esp)          // arg a
    call SYMBOL(art_f2l)          // (jfloat a)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_f2l

DEFINE_FUNCTION art_quick_ldiv
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    PUSH ebx                      // pass arg4 b.hi
    PUSH edx                      // pass arg3 b.lo
    PUSH ecx                      // pass arg2 a.hi
    PUSH eax                      // pass arg1 a.lo
    call SYMBOL(artLdiv)          // (jlong a, jlong b)
    addl LITERAL(28), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-28)
    ret
END_FUNCTION art_quick_ldiv

DEFINE_FUNCTION art_quick_lmod
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    PUSH ebx                      // pass arg4 b.hi
    PUSH edx                      // pass arg3 b.lo
    PUSH ecx                      // pass arg2 a.hi
    PUSH eax                      // pass arg1 a.lo
    call SYMBOL(artLmod)          // (jlong a, jlong b)
    addl LITERAL(28), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-28)
    ret
END_FUNCTION art_quick_lmod

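// Truncated 64-bit multiply: a in ecx:eax and b in ebx:edx (hi:lo), result in edx:eax.
//   (a.hi*2^32 + a.lo) * (b.hi*2^32 + b.lo) mod 2^64
//     = a.lo*b.lo + ((a.lo*b.hi + a.hi*b.lo) << 32)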
DEFINE_FUNCTION art_quick_lmul
    imul %eax, %ebx               // ebx = a.lo(eax) * b.hi(ebx)
    imul %edx, %ecx               // ecx = b.lo(edx) * a.hi(ecx)
    mul  %edx                     // edx:eax = a.lo(eax) * b.lo(edx)
    add  %ebx, %ecx
    add  %ecx, %edx               // edx += (a.lo * b.hi) + (b.lo * a.hi)
    ret
END_FUNCTION art_quick_lmul

DEFINE_FUNCTION art_quick_lshl
    // ecx:eax << edx
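    // x86 shift instructions use only the low five bits of CL, so shift counts of 32-63 are
    // fixed up by the "test LITERAL(32), %cl" sequence below (and likewise in lshr/lushr).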
1168    xchg %edx, %ecx
1169    shld %cl,%eax,%edx
1170    shl  %cl,%eax
1171    test LITERAL(32), %cl
1172    jz  1f
1173    mov %eax, %edx
1174    xor %eax, %eax
11751:
1176    ret
1177END_FUNCTION art_quick_lshl
1178
1179DEFINE_FUNCTION art_quick_lshr
1180    // ecx:eax >> edx
1181    xchg %edx, %ecx
1182    shrd %cl,%edx,%eax
1183    sar  %cl,%edx
1184    test LITERAL(32),%cl
1185    jz  1f
1186    mov %edx, %eax
1187    sar LITERAL(31), %edx
11881:
1189    ret
1190END_FUNCTION art_quick_lshr
1191
1192DEFINE_FUNCTION art_quick_lushr
1193    // ecx:eax >>> edx
1194    xchg %edx, %ecx
1195    shrd %cl,%edx,%eax
1196    shr  %cl,%edx
1197    test LITERAL(32),%cl
1198    jz  1f
1199    mov %edx, %eax
1200    xor %edx, %edx
12011:
1202    ret
1203END_FUNCTION art_quick_lushr
1204
1205ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1206ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1207ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1208ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1209ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1210ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1211ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1212
1213TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1214TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1215TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1216TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1217TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1218TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1219TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1220
1221TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO
1222TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO
1223TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO
1224TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO
1225
1226THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO
1227THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO
1228THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO
1229THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO
1230
1231// Call artSet64InstanceFromCode with 4 word-size arguments and the referrer.
1232DEFINE_FUNCTION art_quick_set64_instance
1233    movd %ebx, %xmm0
1234    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
1235    movd %xmm0, %ebx
1236    // Outgoing argument set up
1237    subl LITERAL(8), %esp         // alignment padding
1238    CFI_ADJUST_CFA_OFFSET(8)
1239    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1240    CFI_ADJUST_CFA_OFFSET(4)
1241    pushl (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+12)(%esp)  // pass referrer
1242    CFI_ADJUST_CFA_OFFSET(4)
1243    PUSH ebx                      // pass high half of new_val
1244    PUSH edx                      // pass low half of new_val
1245    PUSH ecx                      // pass object
1246    PUSH eax                      // pass field_idx
1247    call SYMBOL(artSet64InstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*)
1248    addl LITERAL(32), %esp        // pop arguments
1249    CFI_ADJUST_CFA_OFFSET(-32)
1250    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
1251    RETURN_IF_EAX_ZERO            // return or deliver exception
1252END_FUNCTION art_quick_set64_instance
1253
1254// Call artSet64StaticFromCode with 3 word-size arguments plus the referrer in the 2nd position,
1255// so that new_val would be aligned on even registers if we were passing arguments in registers.
1256DEFINE_FUNCTION art_quick_set64_static
1257    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
1258    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx  // get referrer
1259    subl LITERAL(12), %esp        // alignment padding
1260    CFI_ADJUST_CFA_OFFSET(12)
1261    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1262    CFI_ADJUST_CFA_OFFSET(4)
1263    PUSH edx                      // pass high half of new_val
1264    PUSH ecx                      // pass low half of new_val
1265    PUSH ebx                      // pass referrer
1266    PUSH eax                      // pass field_idx
1267    call SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*)
1268    addl LITERAL(32), %esp        // pop arguments
1269    CFI_ADJUST_CFA_OFFSET(-32)
1270    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
1271    RETURN_IF_EAX_ZERO            // return or deliver exception
1272END_FUNCTION art_quick_set64_static
1273
1274DEFINE_FUNCTION art_quick_proxy_invoke_handler
1275    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX
1276    PUSH esp                      // pass SP
1277    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1278    CFI_ADJUST_CFA_OFFSET(4)
1279    PUSH ecx                      // pass receiver
1280    PUSH eax                      // pass proxy method
1281    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
1282    movd %eax, %xmm0              // place return value also into floating point return value
1283    movd %edx, %xmm1
1284    punpckldq %xmm1, %xmm0
1285    addl LITERAL(16 + FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - FRAME_SIZE_REFS_ONLY_CALLEE_SAVE), %esp
1286    CFI_ADJUST_CFA_OFFSET(-(16 + FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - FRAME_SIZE_REFS_ONLY_CALLEE_SAVE))
1287    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1288    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1289END_FUNCTION art_quick_proxy_invoke_handler
1290
1291    /*
1292     * Called to resolve an IMT conflict. xmm7 is a hidden argument that holds the target method's
1293     * dex method index.
1294     */
1295DEFINE_FUNCTION art_quick_imt_conflict_trampoline
1296    movd %xmm7, %eax              // get target method index stored in xmm7
1297    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
1298END_FUNCTION art_quick_imt_conflict_trampoline
1299
1300DEFINE_FUNCTION art_quick_resolution_trampoline
1301    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx
1302    movl %esp, %edi
1303    PUSH EDI                      // pass SP. do not just PUSH ESP; that messes up unwinding
1304    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1305    CFI_ADJUST_CFA_OFFSET(4)
1306    PUSH ecx                      // pass receiver
1307    PUSH eax                      // pass method
1308    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
1309    movl %eax, %edi               // remember code pointer in EDI
1310    addl LITERAL(16), %esp        // pop arguments
1311    CFI_ADJUST_CFA_OFFSET(-16)
1312    test %eax, %eax               // if code pointer is null goto deliver pending exception
1313    jz 1f
1314    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP
13151:
1316    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
1317    DELIVER_PENDING_EXCEPTION
1318END_FUNCTION art_quick_resolution_trampoline
1319
1320DEFINE_FUNCTION art_quick_generic_jni_trampoline
1321    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX
1322    movl %esp, %ebp               // save SP at callee-save frame
1323    CFI_DEF_CFA_REGISTER(ebp)
1324    subl LITERAL(5120), %esp
1325    // prepare for artQuickGenericJniTrampoline call
1326    // (Thread*,  SP)
1327    //  (esp)    4(esp)   <= C calling convention
1328    //  fs:...    ebp     <= where they are
1329
1330    subl LITERAL(8), %esp         // Padding for 16B alignment.
1331    pushl %ebp                    // Pass SP (to ArtMethod).
1332    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1333    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
1334
1335    // The C call will have registered the complete save-frame on success.
1336    // The result of the call is:
1337    // eax: pointer to native code, 0 on error.
1338    // edx: pointer to the bottom of the used area of the alloca; the stack can be restored up to there.
1339
1340    // Check for error = 0.
1341    test %eax, %eax
1342    jz .Lexception_in_native
1343
1344    // Release part of the alloca.
1345    movl %edx, %esp
1346
1347    // On x86 no arguments are passed in registers, so there is nothing to pop here.
1348    // Native call.
1349    call *%eax
1350
1351    // result sign extension is handled in C code
1352    // prepare for artQuickGenericJniEndTrampoline call
1353    // (Thread*, result, result_f)
1354    //  (esp)    4(esp)  12(esp)    <= C calling convention
1355    //  fs:...  eax:edx   fp0      <= where they are
1356
1357    subl LITERAL(20), %esp        // Padding & pass float result.
1358    fstpl (%esp)
1359    pushl %edx                    // Pass int result.
1360    pushl %eax
1361    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1362    call SYMBOL(artQuickGenericJniEndTrampoline)
1363
1364    // Pending exceptions possible.
1365    mov %fs:THREAD_EXCEPTION_OFFSET, %ebx
1366    testl %ebx, %ebx
1367    jnz .Lexception_in_native
1368
1369    // Tear down the alloca.
1370    movl %ebp, %esp
1371    CFI_DEF_CFA_REGISTER(esp)
1372
1373
1374    // Tear down the callee-save frame.
1375    // Remove space for FPR args and EAX
1376    addl LITERAL(4 + 4 * 8), %esp
1377    CFI_ADJUST_CFA_OFFSET(-(4 + 4 * 8))
1378
1379    POP ecx
1380    addl LITERAL(4), %esp         // Avoid edx, as it may be part of the result.
1381    CFI_ADJUST_CFA_OFFSET(-4)
1382    POP ebx
1383    POP ebp  // Restore callee saves
1384    POP esi
1385    POP edi
1386    // Quick expects the return value to be in xmm0.
1387    movd %eax, %xmm0
1388    movd %edx, %xmm1
1389    punpckldq %xmm1, %xmm0
1390    ret
1391.Lexception_in_native:
1392    movl %fs:THREAD_TOP_QUICK_FRAME_OFFSET, %esp
1393    // Do a call to push a new save-all frame required by the runtime.
1394    call .Lexception_call
1395.Lexception_call:
1396    DELIVER_PENDING_EXCEPTION
1397END_FUNCTION art_quick_generic_jni_trampoline
1398
1399DEFINE_FUNCTION art_quick_to_interpreter_bridge
1400    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  ebx, ebx  // save frame
1401    mov %esp, %edx                // remember SP
1402    PUSH eax                      // alignment padding
1403    PUSH edx                      // pass SP
1404    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1405    CFI_ADJUST_CFA_OFFSET(4)
1406    PUSH eax                      // pass method
1407    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
1408    addl LITERAL(16), %esp        // pop arguments
1409    CFI_ADJUST_CFA_OFFSET(-16)
1410
1411    // Return eax:edx in xmm0 also.
1412    movd %eax, %xmm0
1413    movd %edx, %xmm1
1414    punpckldq %xmm1, %xmm0
1415
1416    addl LITERAL(48), %esp        // Remove FPRs and EAX, ECX, EDX, EBX.
1417    CFI_ADJUST_CFA_OFFSET(-48)
1418
1419    POP ebp                       // Restore callee saves
1420    POP esi
1421    POP edi
1422
1423    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1424END_FUNCTION art_quick_to_interpreter_bridge
1425
1426    /*
1427     * Routine that intercepts method calls and returns.
1428     */
1429DEFINE_FUNCTION art_quick_instrumentation_entry
1430    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, edx
1431    PUSH eax                      // Save eax which will be clobbered by the callee-save method.
1432    subl LITERAL(12), %esp        // Align stack.
1433    CFI_ADJUST_CFA_OFFSET(12)
1434    pushl FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-4+16(%esp)  // Pass LR.
1435    CFI_ADJUST_CFA_OFFSET(4)
1436    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1437    CFI_ADJUST_CFA_OFFSET(4)
1438    PUSH ecx                      // Pass receiver.
1439    PUSH eax                      // Pass Method*.
1440    call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR)
1441    addl LITERAL(28), %esp        // Pop arguments up to saved Method*.
1442    CFI_ADJUST_CFA_OFFSET(-28)
1443    movl 60(%esp), %edi           // Restore edi.
1444    movl %eax, 60(%esp)           // Place code* over edi, just under return pc.
1445    movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
1446    // Place instrumentation exit as return pc. ebx holds the GOT computed on entry.
1447    movl %ebx, 64(%esp)
1448    movl 0(%esp), %eax           // Restore eax.
1449    // Restore FPRs (extra 4 bytes of offset due to EAX push at top).
1450    movsd 8(%esp), %xmm0
1451    movsd 16(%esp), %xmm1
1452    movsd 24(%esp), %xmm2
1453    movsd 32(%esp), %xmm3
1454
1455    // Restore GPRs.
1456    movl 40(%esp), %ecx           // Restore ecx.
1457    movl 44(%esp), %edx           // Restore edx.
1458    movl 48(%esp), %ebx           // Restore ebx.
1459    movl 52(%esp), %ebp           // Restore ebp.
1460    movl 56(%esp), %esi           // Restore esi.
1461    addl LITERAL(60), %esp        // Wind stack back up to code*.
1462    CFI_ADJUST_CFA_OFFSET(-60)
1463    ret                           // Call method (and pop).
1464END_FUNCTION art_quick_instrumentation_entry
1465
1466DEFINE_FUNCTION art_quick_instrumentation_exit
1467    pushl LITERAL(0)              // Push a fake return PC as there will be none on the stack.
1468    CFI_ADJUST_CFA_OFFSET(4)
1469    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx
1470    mov  %esp, %ecx               // Remember SP
1471    subl LITERAL(8), %esp         // Save float return value.
1472    CFI_ADJUST_CFA_OFFSET(8)
1473    movq %xmm0, (%esp)
1474    PUSH edx                      // Save gpr return value.
1475    PUSH eax
1476    subl LITERAL(16), %esp        // Align stack
1477    CFI_ADJUST_CFA_OFFSET(16)
1478    movq %xmm0, (%esp)            // Pass float return value.
1479    PUSH edx                      // Pass gpr return value.
1480    PUSH eax
1481    PUSH ecx                      // Pass SP.
1482    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current.
1483    CFI_ADJUST_CFA_OFFSET(4)
1484    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result, fpr_result)
1485    mov   %eax, %ecx              // Move returned link register.
1486    addl LITERAL(32), %esp        // Pop arguments.
1487    CFI_ADJUST_CFA_OFFSET(-32)
1488    movl %edx, %ebx               // Move returned link register for deopt
1489                                  // (ebx is pretending to be our LR).
1490    POP eax                       // Restore gpr return value.
1491    POP edx
1492    movq (%esp), %xmm0            // Restore fpr return value.
1493    addl LITERAL(8), %esp
1494    CFI_ADJUST_CFA_OFFSET(-8)
1495    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1496    addl LITERAL(4), %esp         // Remove fake return pc.
1497    CFI_ADJUST_CFA_OFFSET(-4)
1498    jmp   *%ecx                   // Return.
1499END_FUNCTION art_quick_instrumentation_exit
1500
1501    /*
1502     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1503     * will long jump to the upcall with a special exception of -1.
1504     */
1505DEFINE_FUNCTION art_quick_deoptimize
1506    PUSH ebx                      // Entry point for a jump. Fake that we were called.
1507    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
1508    subl LITERAL(12), %esp        // Align stack.
1509    CFI_ADJUST_CFA_OFFSET(12)
1510    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1511    CFI_ADJUST_CFA_OFFSET(4)
1512    call SYMBOL(artDeoptimize)    // artDeoptimize(Thread*)
1513    UNREACHABLE
1514END_FUNCTION art_quick_deoptimize
1515
1516    /*
1517     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
1518     * will long jump to the interpreter bridge.
1519     */
1520DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
1521    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
1522    subl LITERAL(12), %esp                      // Align stack.
1523    CFI_ADJUST_CFA_OFFSET(12)
1524    pushl %fs:THREAD_SELF_OFFSET                // Pass Thread::Current().
1525    CFI_ADJUST_CFA_OFFSET(4)
1526    call SYMBOL(artDeoptimizeFromCompiledCode)  // artDeoptimizeFromCompiledCode(Thread*)
1527    UNREACHABLE
1528END_FUNCTION art_quick_deoptimize_from_compiled_code
1529
1530    /*
1531     * String's compareTo.
1532     *
1533     * On entry:
1534     *    eax:   this string object (known non-null)
1535     *    ecx:   comp string object (known non-null)
1536     */
1537DEFINE_FUNCTION art_quick_string_compareto
1538    PUSH esi                      // push callee save reg
1539    PUSH edi                      // push callee save reg
1540    mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx
1541    mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx
1542    lea MIRROR_STRING_VALUE_OFFSET(%eax), %esi
1543    lea MIRROR_STRING_VALUE_OFFSET(%ecx), %edi
1544    /* Calculate min length and count diff */
1545    mov   %edx, %ecx
1546    mov   %edx, %eax
1547    subl  %ebx, %eax
1548    cmovg %ebx, %ecx
1549    /*
1550     * At this point we have:
1551     *   eax: value to return if first part of strings are equal
1552     *   ecx: minimum among the lengths of the two strings
1553     *   esi: pointer to this string data
1554     *   edi: pointer to comp string data
1555     */
1556    jecxz .Lkeep_length
1557    repe cmpsw                    // find nonmatching chars in [%esi] and [%edi], up to length %ecx
1558    jne .Lnot_equal
1559.Lkeep_length:
1560    POP edi                       // pop callee save reg
1561    POP esi                       // pop callee save reg
1562    ret
1563    .balign 16
1564.Lnot_equal:
1565    movzwl  -2(%esi), %eax        // get last compared char from this string
1566    movzwl  -2(%edi), %ecx        // get last compared char from comp string
1567    subl  %ecx, %eax              // return the difference
1568    POP edi                       // pop callee save reg
1569    POP esi                       // pop callee save reg
1570    ret
1571END_FUNCTION art_quick_string_compareto
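
    /*
     * Illustrative only: a hedged C sketch of the comparison above, assuming the
     * uncompressed UTF-16 value arrays of this revision's String layout (function
     * and parameter names are ours):
     *
     *   #include <stdint.h>
     *   int32_t string_compareto_sketch(const uint16_t* lhs, int32_t lhs_len,
     *                                   const uint16_t* rhs, int32_t rhs_len) {
     *     int32_t min_len = (lhs_len < rhs_len) ? lhs_len : rhs_len;
     *     for (int32_t i = 0; i < min_len; ++i) {           // repe cmpsw
     *       if (lhs[i] != rhs[i]) {
     *         return (int32_t)lhs[i] - (int32_t)rhs[i];     // .Lnot_equal
     *       }
     *     }
     *     return lhs_len - rhs_len;                          // .Lkeep_length
     *   }
     */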
1572
1573// Return from a nested signal:
1574// Entry:
1575//  eax: address of jmp_buf in TLS
1576
1577DEFINE_FUNCTION art_nested_signal_return
1578    SETUP_GOT_NOSAVE ebx            // sets %ebx for call into PLT
1579    movl LITERAL(1), %ecx
1580    PUSH ecx                        // second arg to longjmp (1)
1581    PUSH eax                        // first arg to longjmp (jmp_buf)
1582    call PLT_SYMBOL(longjmp)
1583    UNREACHABLE
1584END_FUNCTION art_nested_signal_return
1585
1586DEFINE_FUNCTION art_quick_read_barrier_slow
1587    PUSH edx                        // pass arg3 - offset
1588    PUSH ecx                        // pass arg2 - obj
1589    PUSH eax                        // pass arg1 - ref
1590    call SYMBOL(artReadBarrierSlow) // artReadBarrierSlow(ref, obj, offset)
1591    addl LITERAL(12), %esp          // pop arguments
1592    CFI_ADJUST_CFA_OFFSET(-12)
1593    ret
1594END_FUNCTION art_quick_read_barrier_slow
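
    /*
     * Illustrative only: the slow path above takes the reference just read from
     * obj at the given offset and returns the reference that should be used
     * instead (e.g. a forwarded/marked object). A hedged C-level view of the
     * prototype; the runtime's real declaration uses mirror::Object*:
     *
     *   #include <stdint.h>
     *   typedef struct Object Object;  // stand-in for mirror::Object
     *   Object* artReadBarrierSlow(Object* ref, Object* obj, uint32_t offset);
     */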
1595
1596    // TODO: implement this!
1597UNIMPLEMENTED art_quick_memcmp16
1598