quick_entrypoints_x86.S revision 2cebb24bfc3247d3e9be138a3350106737455918
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "asm_support_x86.S"
18
19// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
20
21    /*
22     * Macro that sets up the callee save frame to conform with
23     * Runtime::CreateCalleeSaveMethod(kSaveAll)
24     */
25MACRO2(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME, got_reg, temp_reg)
26    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
27    PUSH esi
28    PUSH ebp
29    subl  MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
30    CFI_ADJUST_CFA_OFFSET(12)
31    SETUP_GOT_NOSAVE RAW_VAR(got_reg, 0)
32    // Load Runtime::instance_ from GOT.
33    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg, 0)), REG_VAR(temp_reg, 1)
34    movl (REG_VAR(temp_reg, 1)), REG_VAR(temp_reg, 1)
35    // Push save all callee-save method.
36    pushl RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg, 1))
37    CFI_ADJUST_CFA_OFFSET(4)
38    // Store esp as the top quick frame.
39    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
40    // Ugly compile-time check, but we only have the preprocessor.
41    // Last +4: implicit return address pushed on stack when caller made call.
42#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 3*4 + 16 + 4)
43#error "SAVE_ALL_CALLEE_SAVE_FRAME(X86) size not as expected."
44#endif
45END_MACRO
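// Sketch of the kSaveAll frame the macro above builds (offsets relative to ESP
// once the macro has run; derived from the pushes/subl and consistent with the
// size check):
//   [esp +  0]  ArtMethod*  (save-all callee-save method)
//   [esp +  4]  3 words reserved by the subl (unused)
//   [esp + 16]  saved EBP
//   [esp + 20]  saved ESI
//   [esp + 24]  saved EDI
//   [esp + 28]  return address
//   total: 32 bytes == FRAME_SIZE_SAVE_ALL_CALLEE_SAVE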
46
47    /*
48     * Macro that sets up the callee save frame to conform with
49     * Runtime::CreateCalleeSaveMethod(kRefsOnly)
50     */
51MACRO2(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME, got_reg, temp_reg)
52    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
53    PUSH esi
54    PUSH ebp
55    subl  MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
56    CFI_ADJUST_CFA_OFFSET(12)
57    SETUP_GOT_NOSAVE VAR(got_reg, 0)
58    // Load Runtime::instance_ from GOT.
59    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg, 0)), REG_VAR(temp_reg, 1)
60    movl (REG_VAR(temp_reg, 1)), REG_VAR(temp_reg, 1)
61    // Push refs-only callee-save method.
62    pushl RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg, 1))
63    CFI_ADJUST_CFA_OFFSET(4)
64    // Store esp as the top quick frame.
65    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
66
67    // Ugly compile-time check, but we only have the preprocessor.
68    // Last +4: implicit return address pushed on stack when caller made call.
69#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 3*4 + 16 + 4)
70#error "REFS_ONLY_CALLEE_SAVE_FRAME(X86) size not as expected."
71#endif
72END_MACRO
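// The kRefsOnly frame built above has the same 32-byte shape as the kSaveAll
// sketch earlier; only the ArtMethod* pushed at the bottom differs. That is why
// RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME below simply drops 16 bytes (method* plus
// the reserved words) and pops the three callee saves.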
73
74MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME)
75    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values
76    CFI_ADJUST_CFA_OFFSET(-16)
77    POP ebp  // Restore callee saves (ebx is saved/restored by the upcall)
78    POP esi
79    POP edi
80END_MACRO
81
82    /*
83     * Macro that sets up the callee save frame to conform with
84     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs)
85     */
86MACRO2(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME, got_reg, temp_reg)
87    PUSH edi  // Save callee saves
88    PUSH esi
89    PUSH ebp
90    PUSH ebx  // Save args
91    PUSH edx
92    PUSH ecx
93    // Create space for FPR args.
94    subl MACRO_LITERAL(4 * 8), %esp
95    CFI_ADJUST_CFA_OFFSET(4 * 8)
96    // Save FPRs.
97    movsd %xmm0, 0(%esp)
98    movsd %xmm1, 8(%esp)
99    movsd %xmm2, 16(%esp)
100    movsd %xmm3, 24(%esp)
101
102    SETUP_GOT_NOSAVE VAR(got_reg, 0)
103    // Load Runtime::instance_ from GOT.
104    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg, 0)), REG_VAR(temp_reg, 1)
105    movl (REG_VAR(temp_reg, 1)), REG_VAR(temp_reg, 1)
106    // Push refs-and-args callee-save method.
107    pushl RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg, 1))
108    CFI_ADJUST_CFA_OFFSET(4)
109    // Store esp as the top quick frame.
110    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
111
112    // Ugly compile-time check, but we only have the preprocessor.
113    // Last +4: implicit return address pushed on stack when caller made call.
114#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 7*4 + 4*8 + 4)
115#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(X86) size not as expected."
116#endif
117END_MACRO
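// Sketch of the kRefsAndArgs frame built above (offsets relative to ESP after
// the macro; they match the restore macros further down):
//   [esp +  0]  ArtMethod*  (refs-and-args callee-save method)
//   [esp +  4]  XMM0    [esp + 12]  XMM1    [esp + 20]  XMM2    [esp + 28]  XMM3
//   [esp + 36]  ECX     [esp + 40]  EDX     [esp + 44]  EBX
//   [esp + 48]  EBP     [esp + 52]  ESI     [esp + 56]  EDI
//   [esp + 60]  return address
//   total: 64 bytes == FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE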
118
119    /*
120     * Macro that sets up the callee save frame to conform with
121     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs) where the method is passed in EAX.
122     */
123MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX)
124    // Save callee and GPR args, mixed together to agree with core spills bitmap.
125    PUSH edi  // Save callee saves
126    PUSH esi
127    PUSH ebp
128    PUSH ebx  // Save args
129    PUSH edx
130    PUSH ecx
131
132    // Create space for FPR args.
133    subl MACRO_LITERAL(32), %esp
134    CFI_ADJUST_CFA_OFFSET(32)
135
136    // Save FPRs.
137    movsd %xmm0, 0(%esp)
138    movsd %xmm1, 8(%esp)
139    movsd %xmm2, 16(%esp)
140    movsd %xmm3, 24(%esp)
141
142    PUSH eax  // Store the ArtMethod reference at the bottom of the stack.
143    // Store esp as the top quick frame.
144    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
145END_MACRO
146
147MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME)
148    // Restore FPRs. EAX is still on the stack.
149    movsd 4(%esp), %xmm0
150    movsd 12(%esp), %xmm1
151    movsd 20(%esp), %xmm2
152    movsd 28(%esp), %xmm3
153
154    addl MACRO_LITERAL(36), %esp  // Remove FPRs and EAX.
155    CFI_ADJUST_CFA_OFFSET(-36)
156
157    POP ecx  // Restore args except eax
158    POP edx
159    POP ebx
160    POP ebp  // Restore callee saves
161    POP esi
162    POP edi
163END_MACRO
164
165// Restore registers and jump to routine
166// Inputs:  EDI contains pointer to code.
167// Notes: Need to pop EAX too (restores Method*)
168MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP)
169    POP eax  // Restore Method*
170
171    // Restore FPRs.
172    movsd 0(%esp), %xmm0
173    movsd 8(%esp), %xmm1
174    movsd 16(%esp), %xmm2
175    movsd 24(%esp), %xmm3
176
177    addl MACRO_LITERAL(32), %esp  // Remove FPRs.
178    CFI_ADJUST_CFA_OFFSET(-32)
179
180    POP ecx  // Restore args except eax
181    POP edx
182    POP ebx
183    POP ebp  // Restore callee saves
184    POP esi
185    xchgl 0(%esp), %edi // restore EDI and place code pointer as only value on stack
186    ret
187END_MACRO
188
189    /*
190     * Macro that sets up a call through to artDeliverPendingExceptionFromCode, where the pending
191     * exception is Thread::Current()->exception_.
192     */
193MACRO0(DELIVER_PENDING_EXCEPTION)
194    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save callee saves for throw
195    // Outgoing argument set up
196    subl  MACRO_LITERAL(12), %esp              // Alignment padding
197    CFI_ADJUST_CFA_OFFSET(12)
198    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
199    CFI_ADJUST_CFA_OFFSET(4)
200    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*)
201    int3                                       // unreached
202END_MACRO
203
204MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
205    DEFINE_FUNCTION RAW_VAR(c_name, 0)
206    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  ebx, ebx  // save all registers as basis for long jump context
207    // Outgoing argument set up
208    subl  MACRO_LITERAL(12), %esp  // alignment padding
209    CFI_ADJUST_CFA_OFFSET(12)
210    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
211    CFI_ADJUST_CFA_OFFSET(4)
212    call VAR(cxx_name, 1)         // cxx_name(Thread*)
213    int3                          // unreached
214    END_FUNCTION RAW_VAR(c_name, 0)
215END_MACRO
216
217MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
218    DEFINE_FUNCTION RAW_VAR(c_name, 0)
219    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save all registers as basis for long jump context
220    mov %esp, %ecx
221    // Outgoing argument set up
222    subl  MACRO_LITERAL(8), %esp  // alignment padding
223    CFI_ADJUST_CFA_OFFSET(8)
224    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
225    CFI_ADJUST_CFA_OFFSET(4)
226    PUSH eax                      // pass arg1
227    call VAR(cxx_name, 1)         // cxx_name(arg1, Thread*)
228    int3                          // unreached
229    END_FUNCTION RAW_VAR(c_name, 0)
230END_MACRO
231
232MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
233    DEFINE_FUNCTION RAW_VAR(c_name, 0)
234    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save all registers as basis for long jump context
235    // Outgoing argument set up
236    PUSH eax                      // alignment padding
237    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
238    CFI_ADJUST_CFA_OFFSET(4)
239    PUSH ecx                      // pass arg2
240    PUSH eax                      // pass arg1
241    call VAR(cxx_name, 1)         // cxx_name(arg1, arg2, Thread*)
242    int3                          // unreached
243    END_FUNCTION RAW_VAR(c_name, 0)
244END_MACRO
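// Note on the padding in the throw macros above: in each case the padding plus
// the pushed arguments total 16 bytes (12+4, 8+4+4 and 4+4+4+4 respectively),
// so the outgoing-argument area is a multiple of 16 and the stack keeps the
// alignment the C helpers are compiled to expect.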
245
246    /*
247     * Called by managed code to create and deliver a NullPointerException.
248     */
249NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
250
251    /*
252     * Called by managed code to create and deliver an ArithmeticException.
253     */
254NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode
255
256    /*
257     * Called by managed code to create and deliver a StackOverflowError.
258     */
259NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
260
261    /*
262     * Called by managed code, saves callee saves and then calls artThrowException
263     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
264     */
265ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
266
267    /*
268     * Called by managed code to create and deliver a NoSuchMethodError.
269     */
270ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode
271
272    /*
273     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
274     * index, arg2 holds limit.
275     */
276TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
277
278    /*
279     * All generated callsites for interface invokes and invocation slow paths will load arguments
280     * as usual - except instead of loading EAX with the target Method*, EAX will contain
281     * the method_idx.  This wrapper will save the argument registers, load the caller's Method*,
282     * align the stack and call the appropriate C helper.
283     * NOTE: "this" is the first visible argument of the target, and so can be found in ECX.
284     *
285     * The helper will attempt to locate the target and return a 64-bit result in EDX:EAX
286     * consisting of the target Method* in EAX and method->code_ in EDX.
287     *
288     * If unsuccessful, the helper will return null/null and there will be a pending exception in
289     * the thread, in which case we branch to another stub to deliver it.
290     *
291     * On success this wrapper will restore arguments and *jump* to the target, leaving the return
292     * address on the stack pointing back to the original caller.
293     */
294MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
295    DEFINE_FUNCTION RAW_VAR(c_name, 0)
296    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx
297    movl %esp, %edx  // remember SP
298
299    // Outgoing argument set up
300    subl MACRO_LITERAL(12), %esp  // alignment padding
301    CFI_ADJUST_CFA_OFFSET(12)
302    PUSH edx                      // pass SP
303    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
304    CFI_ADJUST_CFA_OFFSET(4)
305    pushl 32+32(%edx)             // pass caller Method*
306    CFI_ADJUST_CFA_OFFSET(4)
307    PUSH ecx                      // pass arg2
308    PUSH eax                      // pass arg1
309    call VAR(cxx_name, 1)         // cxx_name(arg1, arg2, arg3, Thread*, SP)
310    movl %edx, %edi               // save code pointer in EDI
311    addl MACRO_LITERAL(36), %esp  // Pop arguments and the saved EAX (Method*) slot
312    CFI_ADJUST_CFA_OFFSET(-36)
313
314    // Restore FPRs.
315    movsd 0(%esp), %xmm0
316    movsd 8(%esp), %xmm1
317    movsd 16(%esp), %xmm2
318    movsd 24(%esp), %xmm3
319
320    // Remove space for FPR args.
321    addl MACRO_LITERAL(4 * 8), %esp
322    CFI_ADJUST_CFA_OFFSET(-4 * 8)
323
324    POP ecx  // Restore args except eax
325    POP edx
326    POP ebx
327    POP ebp  // Restore callee saves
328    POP esi
329    // Swap EDI callee save with code pointer.
330    xchgl %edi, (%esp)
331    testl %eax, %eax              // Branch forward if exception pending.
332    jz    1f
333    // Tail call to intended method.
334    ret
3351:
336    addl MACRO_LITERAL(4), %esp   // Pop code pointer off stack
337    CFI_ADJUST_CFA_OFFSET(-4)
338    DELIVER_PENDING_EXCEPTION
339    END_FUNCTION RAW_VAR(c_name, 0)
340END_MACRO
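// Rough pseudo-C of one generated trampoline (a sketch only; cxx_name stands
// for the helper named on the INVOKE_TRAMPOLINE line, and the 64-bit result
// comes back in EDX:EAX as described in the block comment above):
//   uint64_t r = cxx_name(method_idx /*EAX*/, arg2 /*ECX*/, caller_method,
//                         Thread::Current(), sp);
//   ArtMethod* target = (ArtMethod*)(uint32_t)r;            // EAX
//   const void* code  = (const void*)(uint32_t)(r >> 32);   // EDX
//   if (target == null) deliver_pending_exception();
//   else restore the argument registers and tail-call code with EAX = target;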
341
342INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
343INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck
344
345INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
346INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
347INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
348INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
349
350    /*
351     * Helper for quick invocation stub to set up XMM registers.
352     * Increments shorty and arg_array and clobbers temp_char.
353     * Branches to finished if it encounters the end of the shorty.
354     */
355MACRO5(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, shorty, arg_array, temp_char, finished)
3561: // LOOP
357    movb (REG_VAR(shorty, 1)), REG_VAR(temp_char, 3)  // temp_char := *shorty
358    addl MACRO_LITERAL(1), REG_VAR(shorty, 1)         // shorty++
359    cmpb MACRO_LITERAL(0), REG_VAR(temp_char, 3)      // if (temp_char == '\0')
360    je RAW_VAR(finished, 4)                           //   goto finished
361    cmpb MACRO_LITERAL(68), REG_VAR(temp_char, 3)     // if (temp_char == 'D')
362    je 2f                                             //   goto FOUND_DOUBLE
363    cmpb MACRO_LITERAL(70), REG_VAR(temp_char, 3)     // if (temp_char == 'F')
364    je 3f                                             //   goto FOUND_FLOAT
365    addl MACRO_LITERAL(4), REG_VAR(arg_array, 2)      // arg_array++
366    //  Handle extra space in arg array taken by a long.
367    cmpb MACRO_LITERAL(74), REG_VAR(temp_char, 3)     // if (temp_char != 'J')
368    jne 1b                                            //   goto LOOP
369    addl MACRO_LITERAL(4), REG_VAR(arg_array, 2)      // arg_array++
370    jmp 1b                                            // goto LOOP
3712:  // FOUND_DOUBLE
372    movsd (REG_VAR(arg_array, 2)), REG_VAR(xmm_reg, 0)
373    addl MACRO_LITERAL(8), REG_VAR(arg_array, 2)      // arg_array+=2
374    jmp 4f
3753:  // FOUND_FLOAT
376    movss (REG_VAR(arg_array, 2)), REG_VAR(xmm_reg, 0)
377    addl MACRO_LITERAL(4), REG_VAR(arg_array, 2)      // arg_array++
3784:
379END_MACRO
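// Pseudo-C sketch of the loop above; arg_array is treated as an array of 4-byte
// slots and one invocation fills exactly one XMM register:
//   for (;;) {
//     char c = *shorty++;
//     if (c == '\0') goto finished;
//     if (c == 'D') { xmm_reg = *(double*)arg_array; arg_array += 2; break; }
//     if (c == 'F') { xmm_reg = *(float*)arg_array;  arg_array += 1; break; }
//     arg_array += (c == 'J') ? 2 : 1;   // longs occupy two slots
//   }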
380
381    /*
382     * Helper for quick invocation stub to set up GPR registers.
383     * Increments shorty and arg_array, and returns the current shorty character in
384     * temp_char. Branches to finished if it encounters the end of the shorty.
385     */
386MACRO4(SKIP_OVER_FLOATS, shorty, arg_array, temp_char, finished)
3871: // LOOP:
388    movb (REG_VAR(shorty, 0)), REG_VAR(temp_char, 2)  // temp_char := *shorty
389    addl MACRO_LITERAL(1), REG_VAR(shorty, 0)         // shorty++
390    cmpb MACRO_LITERAL(0), REG_VAR(temp_char, 2)      // if (temp_char == '\0')
391    je RAW_VAR(finished, 3)                           //   goto finished
392    cmpb MACRO_LITERAL(70), REG_VAR(temp_char, 2)     // if (temp_char == 'F')
393    je 3f                                             //   goto SKIP_FLOAT
394    cmpb MACRO_LITERAL(68), REG_VAR(temp_char, 2)     // if (temp_char == 'D')
395    je 4f                                             //   goto SKIP_DOUBLE
396    jmp 5f                                            // goto end
3973:  // SKIP_FLOAT
398    addl MACRO_LITERAL(4), REG_VAR(arg_array, 1)      // arg_array++
399    jmp 1b                                            // goto LOOP
4004:  // SKIP_DOUBLE
401    addl MACRO_LITERAL(8), REG_VAR(arg_array, 1)      // arg_array+=2
402    jmp 1b                                            // goto LOOP
4035:
404END_MACRO
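// SKIP_OVER_FLOATS in the same pseudo-C style: it steps past 'F'/'D' entries
// (whose values were already loaded into XMM registers) and leaves the first
// non-FP shorty character in temp_char for the caller to dispatch on:
//   for (;;) {
//     char c = *shorty++;                 // also left in temp_char
//     if (c == '\0') goto finished;
//     if (c == 'F') { arg_array += 1; continue; }
//     if (c == 'D') { arg_array += 2; continue; }
//     break;                              // c is an int-like value or 'J'
//   }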
405
406    /*
407     * Quick invocation stub (non-static).
408     * On entry:
409     *   [sp] = return address
410     *   [sp + 4] = method pointer
411     *   [sp + 8] = argument array or null for no argument methods
412     *   [sp + 12] = size of argument array in bytes
413     *   [sp + 16] = (managed) thread pointer
414     *   [sp + 20] = JValue* result
415     *   [sp + 24] = shorty
416     */
417DEFINE_FUNCTION art_quick_invoke_stub
418    // Save the non-volatiles.
419    PUSH ebp                      // save ebp
420    PUSH ebx                      // save ebx
421    PUSH esi                      // save esi
422    PUSH edi                      // save edi
423    // Set up argument XMM registers.
424    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
425    addl LITERAL(1), %esi
426    mov 8+16(%esp), %edi          // EDI := arg_array + 4 ; ie skip this pointer.
427    addl LITERAL(4), %edi
428    // Clobbers ESI, EDI, EAX.
429    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished
430    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished
431    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished
432    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished
433    .balign 16
434.Lxmm_setup_finished:
435    mov %esp, %ebp                // copy value of stack pointer into base pointer
436    CFI_DEF_CFA_REGISTER(ebp)
437    mov 28(%ebp), %ebx            // get arg array size
438    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
439    addl LITERAL(36), %ebx
440    // align frame size to 16 bytes
441    andl LITERAL(0xFFFFFFF0), %ebx
442    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
443    subl %ebx, %esp               // reserve stack space for argument array
444
445    movl LITERAL(0), (%esp)       // store null for method*
446
447    // Copy arg array into stack.
448    movl 28(%ebp), %ecx           // ECX = size of args
449    movl 24(%ebp), %esi           // ESI = argument array
450    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
451    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
452
453    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
454    addl LITERAL(1), %esi
455    mov 24(%ebp), %edi            // EDI := arg_array
456    mov 0(%edi), %ecx             // ECX := this pointer
457    addl LITERAL(4), %edi         // EDI := arg_array + 4 ; ie skip this pointer.
458
459    // Enumerate the possible cases for loading GPRS.
460    // edx (and maybe ebx):
461    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
462    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
463    je .LfirstLong
464    // Must be an integer value.
465    movl (%edi), %edx
466    addl LITERAL(4), %edi         // arg_array++
467
468    // Now check ebx
469    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
470    // Must be first word of a long, or an integer. First word of long doesn't
471    // go into EBX, but can be loaded there anyway, as it is harmless.
472    movl (%edi), %ebx
473    jmp .Lgpr_setup_finished
474.LfirstLong:
475    movl (%edi), %edx
476    movl 4(%edi), %ebx
477    // Nothing left to load.
478.Lgpr_setup_finished:
479    mov 20(%ebp), %eax            // move method pointer into eax
480    call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
481    mov %ebp, %esp                // restore stack pointer
482    CFI_DEF_CFA_REGISTER(esp)
483    POP edi                       // pop edi
484    POP esi                       // pop esi
485    POP ebx                       // pop ebx
486    POP ebp                       // pop ebp
487    mov 20(%esp), %ecx            // get result pointer
488    mov %eax, (%ecx)              // store the result assuming it's a long, int or Object*
489    mov %edx, 4(%ecx)             // store the other half of the result
490    mov 24(%esp), %edx            // get the shorty
491    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
492    je .Lreturn_double_quick
493    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
494    je .Lreturn_float_quick
495    ret
496.Lreturn_double_quick:
497    movsd %xmm0, (%ecx)           // store the floating point result
498    ret
499.Lreturn_float_quick:
500    movss %xmm0, (%ecx)           // store the floating point result
501    ret
502END_FUNCTION art_quick_invoke_stub
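// Worked (hypothetical) example for the stub above: for an instance method with
// shorty "VFJI" - void return, then a float, a long and an int - the setup ends
// with XMM0 = the float, ECX = this and EDX/EBX = the long (low word in EDX).
// The trailing int is not loaded into a GPR at all; the callee reads it from
// the argument-array copy placed on the stack just after the null method* slot.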
503
504    /*
505     * Quick invocation stub (static).
506     * On entry:
507     *   [sp] = return address
508     *   [sp + 4] = method pointer
509     *   [sp + 8] = argument array or null for no argument methods
510     *   [sp + 12] = size of argument array in bytes
511     *   [sp + 16] = (managed) thread pointer
512     *   [sp + 20] = JValue* result
513     *   [sp + 24] = shorty
514     */
515DEFINE_FUNCTION art_quick_invoke_static_stub
516    // Save the non-volatiles.
517    PUSH ebp                      // save ebp
518    PUSH ebx                      // save ebx
519    PUSH esi                      // save esi
520    PUSH edi                      // save edi
521    // Set up argument XMM registers.
522    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
523    addl LITERAL(1), %esi
524    mov 8+16(%esp), %edi          // EDI := arg_array
525    // Clobbers ESI, EDI, EAX.
526    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished2
527    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished2
528    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished2
529    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished2
530    .balign 16
531.Lxmm_setup_finished2:
532    mov %esp, %ebp                // copy value of stack pointer into base pointer
533    CFI_DEF_CFA_REGISTER(ebp)
534    mov 28(%ebp), %ebx            // get arg array size
535    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
536    addl LITERAL(36), %ebx
537    // align frame size to 16 bytes
538    andl LITERAL(0xFFFFFFF0), %ebx
539    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
540    subl %ebx, %esp               // reserve stack space for argument array
541
542    movl LITERAL(0), (%esp)       // store null for method*
543
544    // Copy arg array into stack.
545    movl 28(%ebp), %ecx           // ECX = size of args
546    movl 24(%ebp), %esi           // ESI = argument array
547    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
548    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
549
550    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
551    addl LITERAL(1), %esi
552    mov 24(%ebp), %edi            // EDI := arg_array
553
554    // Enumerate the possible cases for loading GPRS.
555    // ecx (and maybe edx)
556    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
557    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
558    je .LfirstLong2
559    // Must be an integer value.  Load into ECX.
560    movl (%edi), %ecx
561    addl LITERAL(4), %edi         // arg_array++
562
563    // Now check edx (and maybe ebx).
564    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
565    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
566    je .LSecondLong2
567    // Must be an integer.  Load into EDX.
568    movl (%edi), %edx
569    addl LITERAL(4), %edi         // arg_array++
570
571    // Is there anything for ebx?
572    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
573    // Must be first word of a long, or an integer. First word of long doesn't
574    // go into EBX, but can be loaded there anyway, as it is harmless.
575    movl (%edi), %ebx
576    jmp .Lgpr_setup_finished2
577.LSecondLong2:
578    // EDX:EBX is long.  That is all.
579    movl (%edi), %edx
580    movl 4(%edi), %ebx
581    jmp .Lgpr_setup_finished2
582.LfirstLong2:
583    // ECX:EDX is a long
584    movl (%edi), %ecx
585    movl 4(%edi), %edx
586    addl LITERAL(8), %edi         // arg_array += 2
587
588    // Anything for EBX?
589    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
590    // Must be first word of a long, or an integer. First word of long doesn't
591    // go into EBX, but can be loaded there anyway, as it is harmless.
592    movl (%edi), %ebx
593    jmp .Lgpr_setup_finished2
594    // Nothing left to load.
595.Lgpr_setup_finished2:
596    mov 20(%ebp), %eax            // move method pointer into eax
597    call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
598    mov %ebp, %esp                // restore stack pointer
599    CFI_DEF_CFA_REGISTER(esp)
600    POP edi                       // pop edi
601    POP esi                       // pop esi
602    POP ebx                       // pop ebx
603    POP ebp                       // pop ebp
604    mov 20(%esp), %ecx            // get result pointer
605    mov %eax, (%ecx)              // store the result assuming it's a long, int or Object*
606    mov %edx, 4(%ecx)             // store the other half of the result
607    mov 24(%esp), %edx            // get the shorty
608    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
609    je .Lreturn_double_quick2
610    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
611    je .Lreturn_float_quick2
612    ret
613.Lreturn_double_quick2:
614    movsd %xmm0, (%ecx)           // store the floating point result
615    ret
616.Lreturn_float_quick2:
617    movss %xmm0, (%ecx)           // store the floating point result
618    ret
619END_FUNCTION art_quick_invoke_static_stub
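// The static stub only differs in GPR assignment: with no implicit "this", the
// first non-FP slots go to ECX, then EDX, then EBX. For example (hypothetical),
// shorty "VIII" fills ECX, EDX and EBX, while "VIJ" puts the int in ECX and the
// long in EDX/EBX (low word in EDX), exactly as the labels above spell out.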
620
621MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
622    DEFINE_FUNCTION RAW_VAR(c_name, 0)
623    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
624    // Outgoing argument set up
625    subl MACRO_LITERAL(12), %esp  // push padding
626    CFI_ADJUST_CFA_OFFSET(12)
627    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
628    CFI_ADJUST_CFA_OFFSET(4)
629    call VAR(cxx_name, 1)         // cxx_name(Thread*)
630    addl MACRO_LITERAL(16), %esp  // pop arguments
631    CFI_ADJUST_CFA_OFFSET(-16)
632    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
633    CALL_MACRO(return_macro, 2)   // return or deliver exception
634    END_FUNCTION RAW_VAR(c_name, 0)
635END_MACRO
636
637MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
638    DEFINE_FUNCTION RAW_VAR(c_name, 0)
639    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
640    // Outgoing argument set up
641    subl MACRO_LITERAL(8), %esp   // push padding
642    CFI_ADJUST_CFA_OFFSET(8)
643    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
644    CFI_ADJUST_CFA_OFFSET(4)
645    PUSH eax                      // pass arg1
646    call VAR(cxx_name, 1)         // cxx_name(arg1, Thread*)
647    addl MACRO_LITERAL(16), %esp  // pop arguments
648    CFI_ADJUST_CFA_OFFSET(-16)
649    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
650    CALL_MACRO(return_macro, 2)   // return or deliver exception
651    END_FUNCTION RAW_VAR(c_name, 0)
652END_MACRO
653
654MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
655    DEFINE_FUNCTION RAW_VAR(c_name, 0)
656    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
657    // Outgoing argument set up
658    PUSH eax                      // push padding
659    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
660    CFI_ADJUST_CFA_OFFSET(4)
661    PUSH ecx                      // pass arg2
662    PUSH eax                      // pass arg1
663    call VAR(cxx_name, 1)         // cxx_name(arg1, arg2, Thread*)
664    addl MACRO_LITERAL(16), %esp  // pop arguments
665    CFI_ADJUST_CFA_OFFSET(-16)
666    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
667    CALL_MACRO(return_macro, 2)   // return or deliver exception
668    END_FUNCTION RAW_VAR(c_name, 0)
669END_MACRO
670
671MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
672    DEFINE_FUNCTION RAW_VAR(c_name, 0)
673    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
674    // Outgoing argument set up
675    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
676    CFI_ADJUST_CFA_OFFSET(4)
677    PUSH edx                      // pass arg3
678    PUSH ecx                      // pass arg2
679    PUSH eax                      // pass arg1
680    call VAR(cxx_name, 1)         // cxx_name(arg1, arg2, arg3, Thread*)
681    addl MACRO_LITERAL(16), %esp  // pop arguments
682    CFI_ADJUST_CFA_OFFSET(-16)
683    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
684    CALL_MACRO(return_macro, 2)   // return or deliver exception
685    END_FUNCTION RAW_VAR(c_name, 0)
686END_MACRO
687
688MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
689    DEFINE_FUNCTION RAW_VAR(c_name, 0)
690    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx // save ref containing registers for GC
691    // Outgoing argument set up
692    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ecx  // get referrer
693    PUSH eax                      // push padding
694    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
695    CFI_ADJUST_CFA_OFFSET(4)
696    PUSH ecx                      // pass referrer
697    PUSH eax                      // pass arg1
698    call VAR(cxx_name, 1)         // cxx_name(arg1, referrer, Thread*)
699    addl MACRO_LITERAL(16), %esp  // pop arguments
700    CFI_ADJUST_CFA_OFFSET(-16)
701    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
702    CALL_MACRO(return_macro, 2)   // return or deliver exception
703    END_FUNCTION RAW_VAR(c_name, 0)
704END_MACRO
705
706MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
707    DEFINE_FUNCTION RAW_VAR(c_name, 0)
708    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC
709    // Outgoing argument set up
710    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %edx  // get referrer
711    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
712    CFI_ADJUST_CFA_OFFSET(4)
713    PUSH edx                      // pass referrer
714    PUSH ecx                      // pass arg2
715    PUSH eax                      // pass arg1
716    call VAR(cxx_name, 1)         // cxx_name(arg1, arg2, referrer, Thread*)
717    addl MACRO_LITERAL(16), %esp  // pop arguments
718    CFI_ADJUST_CFA_OFFSET(-16)
719    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
720    CALL_MACRO(return_macro, 2)   // return or deliver exception
721    END_FUNCTION RAW_VAR(c_name, 0)
722END_MACRO
723
724MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
725    DEFINE_FUNCTION RAW_VAR(c_name, 0)
726    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
727    // Outgoing argument set up
728    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx  // get referrer
729    subl MACRO_LITERAL(12), %esp  // alignment padding
730    CFI_ADJUST_CFA_OFFSET(12)
731    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
732    CFI_ADJUST_CFA_OFFSET(4)
733    PUSH ebx                      // pass referrer
734    PUSH edx                      // pass arg3
735    PUSH ecx                      // pass arg2
736    PUSH eax                      // pass arg1
737    call VAR(cxx_name, 1)         // cxx_name(arg1, arg2, arg3, referrer, Thread*)
738    addl LITERAL(32), %esp        // pop arguments
739    CFI_ADJUST_CFA_OFFSET(-32)
740    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
741    CALL_MACRO(return_macro, 2)   // return or deliver exception
742    END_FUNCTION RAW_VAR(c_name, 0)
743END_MACRO
744
745MACRO0(RETURN_IF_RESULT_IS_NON_ZERO)
746    testl %eax, %eax               // eax == 0 ?
747    jz  1f                         // if eax == 0 goto 1
748    ret                            // return
7491:                                 // deliver exception on current thread
750    DELIVER_PENDING_EXCEPTION
751END_MACRO
752
753MACRO0(RETURN_IF_EAX_ZERO)
754    testl %eax, %eax               // eax == 0 ?
755    jnz  1f                        // if eax != 0 goto 1
756    ret                            // return
7571:                                 // deliver exception on current thread
758    DELIVER_PENDING_EXCEPTION
759END_MACRO
760
761MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
762    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
763    jne 1f                         // if exception field != 0 goto 1
764    ret                            // return
7651:                                 // deliver exception on current thread
766    DELIVER_PENDING_EXCEPTION
767END_MACRO
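// Taken together, the downcall macros follow one pattern; a TWO_ARG_DOWNCALL,
// for instance, expands to roughly this pseudo-C (sketch only):
//   SETUP_REFS_ONLY_CALLEE_SAVE_FRAME;
//   eax = cxx_name(arg1 /*EAX*/, arg2 /*ECX*/, Thread::Current());  // cdecl, args padded to 16 bytes
//   RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME;
//   return_macro();   // RETURN_IF_RESULT_IS_NON_ZERO, RETURN_IF_EAX_ZERO, ...
// The *_REF_DOWNCALL variants also pass the referrer, i.e. the caller's
// ArtMethod* read from just above the callee-save frame at
// FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp).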
768
769// Generate the allocation entrypoints for each allocator.
770// TODO: use arch/quick_alloc_entrypoints.S. Currently we don't as we need to use concatenation
771// macros to work around differences between OS/X's as and binutils as (OS/X lacks named arguments
772// to macros and the VAR macro won't concatenate arguments properly), this also breaks having
773// multi-line macros that use each other (hence using 1 macro per newline below).
774#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(c_suffix, cxx_suffix) \
775  TWO_ARG_DOWNCALL art_quick_alloc_object ## c_suffix, artAllocObjectFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
776#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(c_suffix, cxx_suffix) \
777  TWO_ARG_DOWNCALL art_quick_alloc_object_resolved ## c_suffix, artAllocObjectFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
778#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(c_suffix, cxx_suffix) \
779  TWO_ARG_DOWNCALL art_quick_alloc_object_initialized ## c_suffix, artAllocObjectFromCodeInitialized ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
780#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
781  TWO_ARG_DOWNCALL art_quick_alloc_object_with_access_check ## c_suffix, artAllocObjectFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
782#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(c_suffix, cxx_suffix) \
783  THREE_ARG_DOWNCALL art_quick_alloc_array ## c_suffix, artAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
784#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(c_suffix, cxx_suffix) \
785  THREE_ARG_DOWNCALL art_quick_alloc_array_resolved ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
786#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
787  THREE_ARG_DOWNCALL art_quick_alloc_array_with_access_check ## c_suffix, artAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
788#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(c_suffix, cxx_suffix) \
789  THREE_ARG_DOWNCALL art_quick_check_and_alloc_array ## c_suffix, artCheckAndAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
790#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
791  THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check ## c_suffix, artCheckAndAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
792
793GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc, DlMalloc)
794GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc, DlMalloc)
795GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc, DlMalloc)
796GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
797GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc, DlMalloc)
798GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc, DlMalloc)
799GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
800GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc, DlMalloc)
801GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
802
803GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc_instrumented, DlMallocInstrumented)
804GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
805GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc_instrumented, DlMallocInstrumented)
806GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
807GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
808GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
809GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
810GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
811GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
812
813GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc)
814GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
815GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
816GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
817GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc, RosAlloc)
818GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc, RosAlloc)
819GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
820GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc, RosAlloc)
821GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
822
823GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc_instrumented, RosAllocInstrumented)
824GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
825GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc_instrumented, RosAllocInstrumented)
826GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
827GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
828GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
829GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
830GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
831GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
832
833GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer, BumpPointer)
834GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer, BumpPointer)
835GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer, BumpPointer)
836GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
837GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer, BumpPointer)
838GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer, BumpPointer)
839GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
840GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer, BumpPointer)
841GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
842
843GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer_instrumented, BumpPointerInstrumented)
844GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
845GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer_instrumented, BumpPointerInstrumented)
846GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
847GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
848GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
849GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
850GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
851GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
852
853GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB)
854GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
855GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
856GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
857GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab, TLAB)
858GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
859GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
860GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab, TLAB)
861GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
862
863GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab_instrumented, TLABInstrumented)
864GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab_instrumented, TLABInstrumented)
865GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab_instrumented, TLABInstrumented)
866GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
867GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
868GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab_instrumented, TLABInstrumented)
869GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
870GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
871GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
872
873GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region, Region)
874GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region, Region)
875GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region, Region)
876GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region, Region)
877GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region, Region)
878GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region, Region)
879GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region, Region)
880GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region, Region)
881GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region, Region)
882
883GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_instrumented, RegionInstrumented)
884GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_instrumented, RegionInstrumented)
885GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_instrumented, RegionInstrumented)
886GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented)
887GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_instrumented, RegionInstrumented)
888GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_instrumented, RegionInstrumented)
889GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented)
890GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_instrumented, RegionInstrumented)
891GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented)
892
893GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB)
894GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
895GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
896GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
897GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab, RegionTLAB)
898GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
899GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
900GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab, RegionTLAB)
901GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
902
903GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab_instrumented, RegionTLABInstrumented)
904GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab_instrumented, RegionTLABInstrumented)
905GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab_instrumented, RegionTLABInstrumented)
906GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented)
907GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab_instrumented, RegionTLABInstrumented)
908GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab_instrumented, RegionTLABInstrumented)
909GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented)
910GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab_instrumented, RegionTLABInstrumented)
911GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented)
912
913TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO
914TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO
915TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO
916TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO
917
918TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
919
920DEFINE_FUNCTION art_quick_lock_object
921    testl %eax, %eax                      // null check object/eax
922    jz   .Lslow_lock
923.Lretry_lock:
924    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
925    test LITERAL(LOCK_WORD_STATE_MASK), %ecx         // test the 2 high bits.
926    jne  .Lslow_lock                      // slow path if either of the two high bits are set.
927    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
928    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
929    test %ecx, %ecx
930    jnz  .Lalready_thin                   // lock word contains a thin lock
931    // unlocked case - edx: original lock word, eax: obj.
932    movl %eax, %ecx                       // remember object in case of retry
933    movl %edx, %eax                       // eax: lock word zero except for read barrier bits.
934    movl %fs:THREAD_ID_OFFSET, %edx       // load thread id.
935    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
936    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
937    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
938    ret
939.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), eax: obj.
940    movl %fs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
941    cmpw %cx, %dx                         // do we hold the lock already?
942    jne  .Lslow_lock
943    movl %edx, %ecx                       // copy the lock word to check count overflow.
944    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
945    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count for overflow check.
946    test LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // overflowed if either of the upper two bits (28-29) is set.
947    jne  .Lslow_lock                      // count overflowed so go slow
948    movl %eax, %ecx                       // save obj to use eax for cmpxchg.
949    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
950    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
951    // update lockword, cmpxchg necessary for read barrier bits.
952    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
953    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
954    ret
955.Llock_cmpxchg_fail:
956    movl  %ecx, %eax                      // restore eax
957    jmp  .Lretry_lock
958.Lslow_lock:
959    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
960    // Outgoing argument set up
961    subl LITERAL(8), %esp         // alignment padding
962    CFI_ADJUST_CFA_OFFSET(8)
963    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
964    CFI_ADJUST_CFA_OFFSET(4)
965    PUSH eax                      // pass object
966    call SYMBOL(artLockObjectFromCode)  // artLockObjectFromCode(object, Thread*)
967    addl LITERAL(16), %esp  // pop arguments
968    CFI_ADJUST_CFA_OFFSET(-16)
969    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
970    RETURN_IF_EAX_ZERO
971END_FUNCTION art_quick_lock_object
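// The fast path above, as a pseudo-C sketch (the lock cmpxchg is what lets the
// update preserve concurrently-set read barrier bits of the lock word):
//   lw = obj->monitor_;
//   if (lw & LOCK_WORD_STATE_MASK) goto slow;                  // inflated or hash code
//   if ((lw & ~rb_bits) == 0)      cmpxchg(obj, lw, thread_id | lw);  // acquire, keep rb bits
//   else if (owner(lw) == thread_id) {
//     if (count + 1 overflows)     goto slow;
//     cmpxchg(obj, lw, lw + LOCK_WORD_THIN_LOCK_COUNT_ONE);    // recursive acquire
//   } else goto slow;                                          // held by another thread
//   // on cmpxchg failure, retry from the top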
972
973DEFINE_FUNCTION art_quick_unlock_object
974    testl %eax, %eax                      // null check object/eax
975    jz   .Lslow_unlock
976.Lretry_unlock:
977    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
978    movl %fs:THREAD_ID_OFFSET, %edx       // edx := thread id
979    test LITERAL(LOCK_WORD_STATE_MASK), %ecx
980    jnz  .Lslow_unlock                    // lock word contains a monitor
981    cmpw %cx, %dx                         // does the thread id match?
982    jne  .Lslow_unlock
983    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
984    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %edx  // zero the read barrier bits.
985    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx
986    jae  .Lrecursive_thin_unlock
987    // update lockword, cmpxchg necessary for read barrier bits.
988    movl %eax, %edx                       // edx: obj
989    movl %ecx, %eax                       // eax: old lock word.
990    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // ecx: new lock word zero except original rb bits.
991#ifndef USE_READ_BARRIER
992    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
993#else
994    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
995    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
996#endif
997    ret
998.Lrecursive_thin_unlock:  // ecx: original lock word, eax: obj
999    // update lockword, cmpxchg necessary for read barrier bits.
1000    movl %eax, %edx                       // edx: obj
1001    movl %ecx, %eax                       // eax: old lock word.
1002    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // ecx: new lock word with decremented count.
1003#ifndef USE_READ_BARRIER
1004    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
1005#else
1006    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
1007    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
1008#endif
1009    ret
1010.Lunlock_cmpxchg_fail:  // edx: obj
1011    movl %edx, %eax                       // restore eax
1012    jmp  .Lretry_unlock
1013.Lslow_unlock:
1014    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
1015    // Outgoing argument set up
1016    subl LITERAL(8), %esp         // alignment padding
1017    CFI_ADJUST_CFA_OFFSET(8)
1018    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1019    CFI_ADJUST_CFA_OFFSET(4)
1020    PUSH eax                      // pass object
1021    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
1022    addl LITERAL(16), %esp  // pop arguments
1023    CFI_ADJUST_CFA_OFFSET(-16)
1024    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
1025    RETURN_IF_EAX_ZERO
1026END_FUNCTION art_quick_unlock_object
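// Unlocking mirrors the sketch above: for a thin lock owned by this thread the
// count either reaches zero (store back just the read barrier bits) or is
// decremented by LOCK_WORD_THIN_LOCK_COUNT_ONE. With USE_READ_BARRIER both
// updates go through lock cmpxchg so concurrent read-barrier-bit changes are
// not lost; otherwise a plain store is sufficient.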
1027
1028DEFINE_FUNCTION art_quick_is_assignable
1029    PUSH eax                     // alignment padding
1030    PUSH ecx                     // pass arg2 - obj->klass
1031    PUSH eax                     // pass arg1 - checked class
1032    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
1033    addl LITERAL(12), %esp        // pop arguments
1034    CFI_ADJUST_CFA_OFFSET(-12)
1035    ret
1036END_FUNCTION art_quick_is_assignable
1037
1038DEFINE_FUNCTION art_quick_check_cast
1039    PUSH eax                     // alignment padding
1040    PUSH ecx                     // pass arg2 - obj->klass
1041    PUSH eax                     // pass arg1 - checked class
1042    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
1043    testl %eax, %eax
1044    jz 1f                         // jump forward if not assignable
1045    addl LITERAL(12), %esp        // pop arguments
1046    CFI_ADJUST_CFA_OFFSET(-12)
1047    ret
10481:
1049    POP eax                       // pop arguments
1050    POP ecx
1051    addl LITERAL(4), %esp
1052    CFI_ADJUST_CFA_OFFSET(-12)
1053    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  ebx, ebx  // save all registers as basis for long jump context
1054    // Outgoing argument set up
1055    PUSH eax                      // alignment padding
1056    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1057    CFI_ADJUST_CFA_OFFSET(4)
1058    PUSH ecx                      // pass arg2
1059    PUSH eax                      // pass arg1
1060    call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*)
1061    int3                          // unreached
1062END_FUNCTION art_quick_check_cast
1063
1064    /*
1065     * Entry from managed code for array put operations of objects where the value being stored
1066     * needs to be checked for compatibility.
1067     * eax = array, ecx = index, edx = value
1068     */
1069DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check
1070    testl %eax, %eax
1071    jnz SYMBOL(art_quick_aput_obj_with_bound_check)
1072    jmp SYMBOL(art_quick_throw_null_pointer_exception)
1073END_FUNCTION art_quick_aput_obj_with_null_and_bound_check
1074
1075DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
1076    movl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %ebx
1077    cmpl %ebx, %ecx
1078    jb SYMBOL(art_quick_aput_obj)
1079    mov %ecx, %eax
1080    mov %ebx, %ecx
1081    jmp SYMBOL(art_quick_throw_array_bounds)
1082END_FUNCTION art_quick_aput_obj_with_bound_check
1083
1084DEFINE_FUNCTION art_quick_aput_obj
1085    test %edx, %edx              // store of null
1086    jz .Ldo_aput_null
1087    movl MIRROR_OBJECT_CLASS_OFFSET(%eax), %ebx
1088    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ebx), %ebx
1089    // value's type == array's component type - trivial assignability
1090    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx
1091    jne .Lcheck_assignability
1092.Ldo_aput:
1093    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
1094    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
1095    shrl LITERAL(7), %eax
1096    movb %dl, (%edx, %eax)
1097    ret
1098.Ldo_aput_null:
1099    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
1100    ret
1101.Lcheck_assignability:
1102    PUSH eax                     // save arguments
1103    PUSH ecx
1104    PUSH edx
1105    subl LITERAL(8), %esp        // alignment padding
1106    CFI_ADJUST_CFA_OFFSET(8)
1107    pushl MIRROR_OBJECT_CLASS_OFFSET(%edx)  // pass arg2 - type of the value to be stored
1108    CFI_ADJUST_CFA_OFFSET(4)
1109    PUSH ebx                     // pass arg1 - component type of the array
1110    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
1111    addl LITERAL(16), %esp       // pop arguments
1112    CFI_ADJUST_CFA_OFFSET(-16)
1113    testl %eax, %eax
1114    jz   .Lthrow_array_store_exception
1115    POP  edx
1116    POP  ecx
1117    POP  eax
1118    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)  // do the aput
1119    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
1120    shrl LITERAL(7), %eax
1121    movb %dl, (%edx, %eax)
1122    ret
1123.Lthrow_array_store_exception:
1124    POP  edx
1125    POP  ecx
1126    POP  eax
1127    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context
1128    // Outgoing argument set up
1129    PUSH eax                      // alignment padding
1130    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1131    CFI_ADJUST_CFA_OFFSET(4)
1132    PUSH edx                      // pass arg2 - value
1133    PUSH eax                      // pass arg1 - array
1134    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
1135    int3                          // unreached
1136END_FUNCTION art_quick_aput_obj
1137
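    /*
     * Illustrative C outline of art_quick_aput_obj above. Field and parameter
     * names are stand-ins for the MIRROR_* offsets, AputObjSketch is our name,
     * and the card-table write copies the shrl/movb idiom: one byte per
     * 128-byte (1 << 7) card, dirtied with the low byte of the card-table base.
     *
     *   static void AputObjSketch(Object* array, uint32_t index, Object* value,
     *                             uint8_t* card_table, Thread* self) {
     *     if (value == NULL) {
     *       array->data[index] = NULL;                 // null stores need no type check
     *       return;
     *     }
     *     Class* component_type = array->klass->component_type;
     *     if (value->klass != component_type &&
     *         artIsAssignableFromCode(component_type, value->klass) == 0) {
     *       artThrowArrayStoreException(array, value, self);   // never returns
     *     }
     *     array->data[index] = value;
     *     card_table[(uintptr_t)array >> 7] = (uint8_t)(uintptr_t)card_table;
     *   }
     */
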
1138DEFINE_FUNCTION art_quick_memcpy
1139    SETUP_GOT_NOSAVE ebx          // clobbers EBX
1140    PUSH edx                      // pass arg3
1141    PUSH ecx                      // pass arg2
1142    PUSH eax                      // pass arg1
1143    call PLT_SYMBOL(memcpy)       // (void*, const void*, size_t)
1144    addl LITERAL(12), %esp        // pop arguments
1145    CFI_ADJUST_CFA_OFFSET(-12)
1146    ret
1147END_FUNCTION art_quick_memcpy
1148
1149NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret
1150
1151DEFINE_FUNCTION art_quick_d2l
1152    subl LITERAL(12), %esp        // alignment padding, room for argument
1153    CFI_ADJUST_CFA_OFFSET(12)
1154    movsd %xmm0, 0(%esp)          // arg a
1155    call SYMBOL(art_d2l)          // (jdouble a)
1156    addl LITERAL(12), %esp        // pop arguments
1157    CFI_ADJUST_CFA_OFFSET(-12)
1158    ret
1159END_FUNCTION art_quick_d2l
1160
1161DEFINE_FUNCTION art_quick_f2l
1162    subl LITERAL(12), %esp        // alignment padding, room for argument
1163    CFI_ADJUST_CFA_OFFSET(12)
1164    movss %xmm0, 0(%esp)          // arg a
1165    call SYMBOL(art_f2l)          // (jfloat a)
1166    addl LITERAL(12), %esp        // pop arguments
1167    CFI_ADJUST_CFA_OFFSET(-12)
1168    ret
1169END_FUNCTION art_quick_f2l
1170
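    /*
     * The two stubs above only marshal xmm0 onto the stack and call the C
     * helpers art_d2l / art_f2l. Presumably those helpers apply the Java
     * conversion rules; the sketch below is ours, not the helpers' source:
     *
     *   #include <math.h>
     *   #include <stdint.h>
     *
     *   static int64_t d2l_sketch(double d) {
     *     if (isnan(d))                      return 0;          // NaN converts to 0
     *     if (d >= 9223372036854775808.0)    return INT64_MAX;  // clamp at 2^63 - 1
     *     if (d <= -9223372036854775808.0)   return INT64_MIN;  // clamp at -2^63
     *     return (int64_t)d;                                    // otherwise truncate toward zero
     *   }
     */
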
1171DEFINE_FUNCTION art_quick_ldiv
1172    subl LITERAL(12), %esp       // alignment padding
1173    CFI_ADJUST_CFA_OFFSET(12)
1174    PUSH ebx                     // pass arg4 b.hi
1175    PUSH edx                     // pass arg3 b.lo
1176    PUSH ecx                     // pass arg2 a.hi
1177    PUSH eax                     // pass arg1 a.lo
1178    call SYMBOL(artLdiv)     // (jlong a, jlong b)
1179    addl LITERAL(28), %esp       // pop arguments
1180    CFI_ADJUST_CFA_OFFSET(-28)
1181    ret
1182END_FUNCTION art_quick_ldiv
1183
1184DEFINE_FUNCTION art_quick_lmod
1185    subl LITERAL(12), %esp       // alignment padding
1186    CFI_ADJUST_CFA_OFFSET(12)
1187    PUSH ebx                     // pass arg4 b.hi
1188    PUSH edx                     // pass arg3 b.lo
1189    PUSH ecx                     // pass arg2 a.hi
1190    PUSH eax                     // pass arg1 a.lo
1191    call SYMBOL(artLmod)     // (jlong a, jlong b)
1192    addl LITERAL(28), %esp       // pop arguments
1193    CFI_ADJUST_CFA_OFFSET(-28)
1194    ret
1195END_FUNCTION art_quick_lmod
1196
1197DEFINE_FUNCTION art_quick_lmul
1198    imul %eax, %ebx              // ebx = a.lo(eax) * b.hi(ebx)
1199    imul %edx, %ecx              // ecx = b.lo(edx) * a.hi(ecx)
1200    mul  %edx                    // edx:eax = a.lo(eax) * b.lo(edx)
1201    add  %ebx, %ecx
1202    add  %ecx, %edx              // edx += (a.lo * b.hi) + (b.lo * a.hi)
1203    ret
1204END_FUNCTION art_quick_lmul
1205
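    /*
     * Self-contained C model of the 32x32 schoolbook multiply above (the
     * function name is ours). The low 64 bits of a*b need only the full low
     * product plus the two cross products added into the high word:
     *
     *   #include <stdint.h>
     *
     *   static uint64_t lmul_sketch(uint64_t a, uint64_t b) {
     *     uint32_t a_lo = (uint32_t)a, a_hi = (uint32_t)(a >> 32);
     *     uint32_t b_lo = (uint32_t)b, b_hi = (uint32_t)(b >> 32);
     *     uint64_t low  = (uint64_t)a_lo * b_lo;                  // mul: edx:eax
     *     uint32_t high = (uint32_t)(low >> 32)
     *                     + a_lo * b_hi + a_hi * b_lo;            // the two imuls + adds
     *     return ((uint64_t)high << 32) | (uint32_t)low;
     *   }
     */
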
1206DEFINE_FUNCTION art_quick_lshl
1207    // ecx:eax << edx
1208    xchg %edx, %ecx
1209    shld %cl,%eax,%edx
1210    shl  %cl,%eax
1211    test LITERAL(32), %cl
1212    jz  1f
1213    mov %eax, %edx
1214    xor %eax, %eax
12151:
1216    ret
1217END_FUNCTION art_quick_lshl
1218
1219DEFINE_FUNCTION art_quick_lshr
1220    // ecx:eax >> edx
1221    xchg %edx, %ecx
1222    shrd %cl,%edx,%eax
1223    sar  %cl,%edx
1224    test LITERAL(32),%cl
1225    jz  1f
1226    mov %edx, %eax
1227    sar LITERAL(31), %edx
12281:
1229    ret
1230END_FUNCTION art_quick_lshr
1231
1232DEFINE_FUNCTION art_quick_lushr
1233    // ecx:eax >>> edx
1234    xchg %edx, %ecx
1235    shrd %cl,%edx,%eax
1236    shr  %cl,%edx
1237    test LITERAL(32),%cl
1238    jz  1f
1239    mov %edx, %eax
1240    xor %edx, %edx
12411:
1242    ret
1243END_FUNCTION art_quick_lushr
1244
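    /*
     * C model of the three long-shift stubs above (the names are ours). Only
     * the low six bits of the count matter: shld/shrd/shl/sar/shr use cl mod 32
     * and the test of bit 5 (LITERAL(32)) handles counts of 32..63. The signed
     * right shift assumes the usual two's-complement arithmetic behaviour.
     *
     *   #include <stdint.h>
     *
     *   static int64_t lshl_sketch(int64_t x, uint32_t n) {
     *     return (int64_t)((uint64_t)x << (n & 63));
     *   }
     *   static int64_t lshr_sketch(int64_t x, uint32_t n) {
     *     return x >> (n & 63);                        // sign-filling, like the sar path
     *   }
     *   static int64_t lushr_sketch(int64_t x, uint32_t n) {
     *     return (int64_t)((uint64_t)x >> (n & 63));   // zero-filling
     *   }
     */
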
1245ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1246ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1247ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1248ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1249ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1250ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1251ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1252
1253TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1254TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1255TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1256TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1257TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1258TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1259TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1260
1261TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO
1262TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO
1263TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO
1264TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO
1265
1266THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO
1267THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO
1268THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO
1269THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO
1270
1271// Call artSet64InstanceFromCode with 4 word-size arguments and the referrer.
1272DEFINE_FUNCTION art_quick_set64_instance
1273    movd %ebx, %xmm0
1274    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
1275    movd %xmm0, %ebx
1276    // Outgoing argument set up
1277    subl LITERAL(8), %esp         // alignment padding
1278    CFI_ADJUST_CFA_OFFSET(8)
1279    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1280    CFI_ADJUST_CFA_OFFSET(4)
1281    pushl (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+12)(%esp)  // pass referrer
1282    CFI_ADJUST_CFA_OFFSET(4)
1283    PUSH ebx                      // pass high half of new_val
1284    PUSH edx                      // pass low half of new_val
1285    PUSH ecx                      // pass object
1286    PUSH eax                      // pass field_idx
1287    call SYMBOL(artSet64InstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*)
1288    addl LITERAL(32), %esp        // pop arguments
1289    CFI_ADJUST_CFA_OFFSET(-32)
1290    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
1291    RETURN_IF_EAX_ZERO            // return or deliver exception
1292END_FUNCTION art_quick_set64_instance
1293
1294// Call artSet64StaticFromCode with 3 word-size arguments, passing the referrer in the 2nd position
1295// so that new_val would be aligned on an even register pair if the arguments were passed in registers.
1296DEFINE_FUNCTION art_quick_set64_static
1297    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
1298    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx  // get referrer
1299    subl LITERAL(12), %esp        // alignment padding
1300    CFI_ADJUST_CFA_OFFSET(12)
1301    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1302    CFI_ADJUST_CFA_OFFSET(4)
1303    PUSH edx                      // pass high half of new_val
1304    PUSH ecx                      // pass low half of new_val
1305    PUSH ebx                      // pass referrer
1306    PUSH eax                      // pass field_idx
1307    call SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*)
1308    addl LITERAL(32), %esp        // pop arguments
1309    CFI_ADJUST_CFA_OFFSET(-32)
1310    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
1311    RETURN_IF_EAX_ZERO            // return or deliver exception
1312END_FUNCTION art_quick_set64_static
1313
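    /*
     * Prototype implied by the pushes above (types simplified; a zero return
     * means success, per RETURN_IF_EAX_ZERO):
     *
     *   int artSet64StaticFromCode(uint32_t field_idx, ArtMethod* referrer,
     *                              uint64_t new_val, Thread* self);
     *
     * Keeping the referrer in the second slot places the 64-bit new_val at an
     * 8-byte-aligned position, which maps to an even register pair on targets
     * that pass these arguments in registers.
     */
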
1314DEFINE_FUNCTION art_quick_proxy_invoke_handler
1315    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX
1316    PUSH esp                      // pass SP
1317    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1318    CFI_ADJUST_CFA_OFFSET(4)
1319    PUSH ecx                      // pass receiver
1320    PUSH eax                      // pass proxy method
1321    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
1322    movd %eax, %xmm0              // place return value also into floating point return value
1323    movd %edx, %xmm1
1324    punpckldq %xmm1, %xmm0
1325    addl LITERAL(76), %esp        // pop arguments and the callee-save frame, up to the return address
1326    CFI_ADJUST_CFA_OFFSET(-76)
1327    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1328END_FUNCTION art_quick_proxy_invoke_handler
1329
1330    /*
1331     * Called to resolve an imt conflict. xmm7 is a hidden argument that holds the target method's
1332     * dex method index.
1333     */
1334DEFINE_FUNCTION art_quick_imt_conflict_trampoline
1335    PUSH ecx
1336    movl 8(%esp), %eax            // load caller Method*
1337    movl MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET(%eax), %eax  // load dex_cache_resolved_methods
1338    movd %xmm7, %ecx              // get target method index stored in xmm7
1339    movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4), %eax  // load the target method
1340    POP ecx
1341    jmp SYMBOL(art_quick_invoke_interface_trampoline)
1342END_FUNCTION art_quick_imt_conflict_trampoline
1343
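    /*
     * Rough C shape of the lookup performed above (illustrative only; the field
     * names follow the offset macros used in the code, the function name is
     * ours):
     *
     *   static ArtMethod* ImtConflictSketch(ArtMethod* caller_method, uint32_t method_index) {
     *     ObjectArray* resolved = caller_method->dex_cache_resolved_methods;
     *     ArtMethod* target = (ArtMethod*)resolved->data[method_index];
     *     return target;  // control then tail-jumps to art_quick_invoke_interface_trampoline
     *   }                 // with the resolved method in eax.
     */
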
1344DEFINE_FUNCTION art_quick_resolution_trampoline
1345    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx
1346    movl %esp, %edi
1347    PUSH edi                      // pass SP; do not just PUSH esp, as that messes up unwinding
1348    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1349    CFI_ADJUST_CFA_OFFSET(4)
1350    PUSH ecx                      // pass receiver
1351    PUSH eax                      // pass method
1352    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
1353    movl %eax, %edi               // remember code pointer in EDI
1354    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
1355    test %eax, %eax               // if code pointer is null goto deliver pending exception
1356    jz 1f
1357    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP
13581:
1359    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
1360    DELIVER_PENDING_EXCEPTION
1361END_FUNCTION art_quick_resolution_trampoline
1362
1363DEFINE_FUNCTION art_quick_generic_jni_trampoline
1364    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX
1365    movl %esp, %ebp                 // save SP at callee-save frame
1366    CFI_DEF_CFA_REGISTER(ebp)
1367    subl LITERAL(5120), %esp
1368    // prepare for artQuickGenericJniTrampoline call
1369    // (Thread*,  SP)
1370    //  (esp)    4(esp)   <= C calling convention
1371    //  fs:...    ebp     <= where they are
1372
1373    subl LITERAL(8), %esp         // Padding for 16B alignment.
1374    pushl %ebp                    // Pass SP (to ArtMethod).
1375    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1376    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
1377
1378    // The C call will have registered the complete save-frame on success.
1379    // The result of the call is:
1380    // eax: pointer to native code, 0 on error.
1381    // edx: pointer to the bottom of the used area of the alloca, can restore stack till there.
1382
1383    // Check for error = 0.
1384    test %eax, %eax
1385    jz .Lexception_in_native
1386
1387    // Release part of the alloca.
1388    movl %edx, %esp
1389
1390    // On x86 the native ABI passes all arguments on the stack, so there are no argument registers to load here.
1391    // Native call.
1392    call *%eax
1393
1394    // result sign extension is handled in C code
1395    // prepare for artQuickGenericJniEndTrampoline call
1396    // (Thread*, result, result_f)
1397    //  (esp)    4(esp)  12(esp)    <= C calling convention
1398    //  fs:...  eax:edx   fp0      <= where they are
1399
1400    subl LITERAL(20), %esp         // Padding & pass float result.
1401    fstpl (%esp)
1402    pushl %edx                    // Pass int result.
1403    pushl %eax
1404    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1405    call SYMBOL(artQuickGenericJniEndTrampoline)
1406
1407    // Pending exceptions possible.
1408    mov %fs:THREAD_EXCEPTION_OFFSET, %ebx
1409    testl %ebx, %ebx
1410    jnz .Lexception_in_native
1411
1412    // Tear down the alloca.
1413    movl %ebp, %esp
1414    CFI_DEF_CFA_REGISTER(esp)
1415
1416
1417    // Tear down the callee-save frame.
1418    // Remove space for FPR args and EAX
1419    addl LITERAL(4 + 4 * 8), %esp
1420    CFI_ADJUST_CFA_OFFSET(-(4 + 4 * 8))
1421
1422    POP ecx
1423    addl LITERAL(4), %esp     // Avoid edx, as it may be part of the result.
1424    CFI_ADJUST_CFA_OFFSET(-4)
1425    POP ebx
1426    POP ebp  // Restore callee saves
1427    POP esi
1428    POP edi
1429    // Quick expects the return value to be in xmm0.
1430    movd %eax, %xmm0
1431    movd %edx, %xmm1
1432    punpckldq %xmm1, %xmm0
1433    ret
1434.Lexception_in_native:
1435    movl %fs:THREAD_TOP_QUICK_FRAME_OFFSET, %esp
1436    // Do a call to push a return PC; DELIVER_PENDING_EXCEPTION below then builds
    // the save-all frame required by the runtime.
1437    call .Lexception_call
1438.Lexception_call:
1439    DELIVER_PENDING_EXCEPTION
1440END_FUNCTION art_quick_generic_jni_trampoline
1441
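    /*
     * For reference, the contract of the two runtime calls above as described
     * by the surrounding comments (prototypes simplified; the eax:edx pair is
     * modelled as a 64-bit return value):
     *
     *   // Builds the handle scope and the native stack arguments in the
     *   // reserved area; returns eax = native code to call (0 on error) and
     *   // edx = bottom of the used part of the alloca (the new esp).
     *   uint64_t artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp);
     *
     *   // Called after the native method returns, with the raw eax:edx result
     *   // and the x87 float result; returns the managed result. Pending
     *   // exceptions are then checked via THREAD_EXCEPTION_OFFSET.
     *   uint64_t artQuickGenericJniEndTrampoline(Thread* self, uint64_t result, double result_f);
     */
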
1442DEFINE_FUNCTION art_quick_to_interpreter_bridge
1443    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  ebx, ebx  // save frame
1444    mov %esp, %edx                // remember SP
1445    PUSH eax                      // alignment padding
1446    PUSH edx                      // pass SP
1447    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1448    CFI_ADJUST_CFA_OFFSET(4)
1449    PUSH eax                      // pass method
1450    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
1451    addl LITERAL(16), %esp        // pop arguments
1452    CFI_ADJUST_CFA_OFFSET(-16)
1453
1454    // Return eax:edx in xmm0 also.
1455    movd %eax, %xmm0
1456    movd %edx, %xmm1
1457    punpckldq %xmm1, %xmm0
1458
1459    addl LITERAL(48), %esp        // Remove FPRs and EAX, ECX, EDX, EBX.
1460    CFI_ADJUST_CFA_OFFSET(-48)
1461
1462    POP ebp  // Restore callee saves
1463    POP esi
1464    POP edi
1465
1466    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1467END_FUNCTION art_quick_to_interpreter_bridge
1468
1469    /*
1470     * Routine that intercepts method calls and returns.
1471     */
1472DEFINE_FUNCTION art_quick_instrumentation_entry
1473    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, edx
1474    PUSH eax                      // Save eax which will be clobbered by the callee-save method.
1475    subl LITERAL(12), %esp        // Align stack.
1476    CFI_ADJUST_CFA_OFFSET(12)
1477    pushl FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-4+16(%esp)  // Pass LR.
1478    CFI_ADJUST_CFA_OFFSET(4)
1479    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1480    CFI_ADJUST_CFA_OFFSET(4)
1481    PUSH ecx                      // Pass receiver.
1482    PUSH eax                      // Pass Method*.
1483    call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR)
1484    addl LITERAL(28), %esp        // Pop arguments up to the saved Method*.
1485    movl 60(%esp), %edi           // Restore edi.
1486    movl %eax, 60(%esp)           // Place code* over edi, just under return pc.
1487    movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
1488    // Place instrumentation exit as return pc. ebx holds the GOT computed on entry.
1489    movl %ebx, 64(%esp)
1490    movl 0(%esp), %eax           // Restore eax.
1491    // Restore FPRs (extra 4 bytes of offset due to EAX push at top).
1492    movsd 8(%esp), %xmm0
1493    movsd 16(%esp), %xmm1
1494    movsd 24(%esp), %xmm2
1495    movsd 32(%esp), %xmm3
1496
1497    // Restore GPRs.
1498    movl 40(%esp), %ecx           // Restore ecx.
1499    movl 44(%esp), %edx           // Restore edx.
1500    movl 48(%esp), %ebx           // Restore ebx.
1501    movl 52(%esp), %ebp           // Restore ebp.
1502    movl 56(%esp), %esi           // Restore esi.
1503    addl LITERAL(60), %esp        // Wind stack back up to code*.
1504    ret                           // Call method (and pop).
1505END_FUNCTION art_quick_instrumentation_entry
1506
1507DEFINE_FUNCTION art_quick_instrumentation_exit
1508    pushl LITERAL(0)              // Push a fake return PC as there will be none on the stack.
1509    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx
1510    mov  %esp, %ecx               // Remember SP
1511    subl LITERAL(8), %esp         // Save float return value.
1512    CFI_ADJUST_CFA_OFFSET(8)
1513    movq %xmm0, (%esp)
1514    PUSH edx                      // Save gpr return value.
1515    PUSH eax
1516    subl LITERAL(16), %esp        // Align stack and make room for the float result passed below.
1517    CFI_ADJUST_CFA_OFFSET(16)
1518    movq %xmm0, (%esp)            // Pass float return value.
1519    PUSH edx                      // Pass gpr return value.
1520    PUSH eax
1521    PUSH ecx                      // Pass SP.
1522    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current.
1523    CFI_ADJUST_CFA_OFFSET(4)
1524    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result, fpr_result)
1525    mov   %eax, %ecx              // Move returned link register.
1526    addl LITERAL(32), %esp        // Pop arguments.
1527    CFI_ADJUST_CFA_OFFSET(-32)
1528    movl %edx, %ebx               // Move returned link register for deopt
1529                                  // (ebx is pretending to be our LR).
1530    POP eax                       // Restore gpr return value.
1531    POP edx
1532    movq (%esp), %xmm0            // Restore fpr return value.
1533    addl LITERAL(8), %esp
1534    CFI_ADJUST_CFA_OFFSET(-8)
1535    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1536    addl LITERAL(4), %esp         // Remove fake return pc.
1537    jmp   *%ecx                   // Return.
1538END_FUNCTION art_quick_instrumentation_exit
1539
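    /*
     * Shape of the instrumentation hand-off implemented by the two stubs above
     * (signatures paraphrased from the call-site comments; exact types are
     * simplified):
     *
     *   // Entry: returns the code pointer to invoke; the stub stores it over
     *   // the saved edi slot and rewrites the return PC to
     *   // art_quick_instrumentation_exit before "returning" into the method.
     *   const void* artInstrumentationMethodEntryFromCode(ArtMethod* method, Object* receiver,
     *                                                     Thread* self, uintptr_t lr);
     *
     *   // Exit: returns an eax:edx pair (modelled as 64 bits): eax is the real
     *   // return address to jump to, edx an alternate address used if
     *   // deoptimization was requested (kept in ebx as a pretend LR).
     *   uint64_t artInstrumentationMethodExitFromCode(Thread* self, void* sp,
     *                                                 uint64_t gpr_result, uint64_t fpr_result);
     */
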
1540    /*
1541     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1542     * will long jump to the upcall with a special exception of -1.
1543     */
1544DEFINE_FUNCTION art_quick_deoptimize
1545    pushl %ebx                    // Entry point for a jump. Fake that we were called.
1546.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path)  // Entry point for real calls
1547                                                             // from compiled slow paths.
1548SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
1549    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
1550    subl LITERAL(12), %esp        // Align stack.
1551    CFI_ADJUST_CFA_OFFSET(12)
1552    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1553    CFI_ADJUST_CFA_OFFSET(4)
1554    call SYMBOL(artDeoptimize)    // artDeoptimize(Thread*)
1555    int3                          // Unreachable.
1556END_FUNCTION art_quick_deoptimize
1557
1558    /*
1559     * String's compareTo.
1560     *
1561     * On entry:
1562     *    eax:   this string object (known non-null)
1563     *    ecx:   comp string object (known non-null)
1564     */
1565DEFINE_FUNCTION art_quick_string_compareto
1566    PUSH esi                    // push callee save reg
1567    PUSH edi                    // push callee save reg
1568    mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx
1569    mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx
1570    mov MIRROR_STRING_VALUE_OFFSET(%eax), %esi
1571    mov MIRROR_STRING_VALUE_OFFSET(%ecx), %edi
1572    mov MIRROR_STRING_OFFSET_OFFSET(%eax), %eax
1573    mov MIRROR_STRING_OFFSET_OFFSET(%ecx), %ecx
1574    /* Build pointers to the start of string data */
1575    lea  MIRROR_CHAR_ARRAY_DATA_OFFSET(%esi, %eax, 2), %esi
1576    lea  MIRROR_CHAR_ARRAY_DATA_OFFSET(%edi, %ecx, 2), %edi
1577    /* Calculate min length and count diff */
1578    mov   %edx, %ecx
1579    mov   %edx, %eax
1580    subl  %ebx, %eax
1581    cmovg %ebx, %ecx
1582    /*
1583     * At this point we have:
1584     *   eax: value to return if first part of strings are equal
1585     *   ecx: minimum among the lengths of the two strings
1586     *   esi: pointer to this string data
1587     *   edi: pointer to comp string data
1588     */
1589    jecxz .Lkeep_length
1590    repe cmpsw                    // find nonmatching chars in [%esi] and [%edi], up to length %ecx
1591    jne .Lnot_equal
1592.Lkeep_length:
1593    POP edi                       // pop callee save reg
1594    POP esi                       // pop callee save reg
1595    ret
1596    .balign 16
1597.Lnot_equal:
1598    movzwl  -2(%esi), %eax        // get last compared char from this string
1599    movzwl  -2(%edi), %ecx        // get last compared char from comp string
1600    subl  %ecx, %eax              // return the difference
1601    POP edi                       // pop callee save reg
1602    POP esi                       // pop callee save reg
1603    ret
1604END_FUNCTION art_quick_string_compareto
1605
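    /*
     * Self-contained C model of the comparison above (the function name is
     * ours; the count/value/offset fields read through the MIRROR_* offsets are
     * passed in directly as pointers and lengths):
     *
     *   #include <stdint.h>
     *
     *   static int32_t CompareToSketch(const uint16_t* this_chars, int32_t this_count,
     *                                  const uint16_t* comp_chars, int32_t comp_count) {
     *     int32_t min_count = (this_count < comp_count) ? this_count : comp_count;
     *     for (int32_t i = 0; i < min_count; ++i) {            // repe cmpsw
     *       if (this_chars[i] != comp_chars[i]) {
     *         return (int32_t)this_chars[i] - (int32_t)comp_chars[i];
     *       }
     *     }
     *     return this_count - comp_count;                      // common prefix: lengths decide
     *   }
     */
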
1606// Return from a nested signal:
1607// Entry:
1608//  eax: address of jmp_buf in TLS
1609
1610DEFINE_FUNCTION art_nested_signal_return
1611    SETUP_GOT_NOSAVE ebx            // sets %ebx for call into PLT
1612    movl LITERAL(1), %ecx
1613    pushl %ecx                      // second arg to longjmp (1)
1614    pushl %eax                      // first arg to longjmp (jmp_buf)
1615    call PLT_SYMBOL(longjmp)
1616    int3                            // won't get here.
1617END_FUNCTION art_nested_signal_return
1618
1619    // TODO: implement this!
1620UNIMPLEMENTED art_quick_memcmp16
1621