1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "asm_support_x86.S"
18
19#include "arch/quick_alloc_entrypoints.S"
20
21// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
22
23    /*
24     * Macro that sets up the callee save frame to conform with
25     * Runtime::CreateCalleeSaveMethod(kSaveAll)
26     */
27MACRO2(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME, got_reg, temp_reg)
28    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
29    PUSH esi
30    PUSH ebp
31    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
32    CFI_ADJUST_CFA_OFFSET(12)
33    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
34    // Load Runtime::instance_ from GOT.
35    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
36    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
37    // Push save all callee-save method.
38    pushl RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
39    CFI_ADJUST_CFA_OFFSET(4)
40    // Store esp as the top quick frame.
41    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
42    // Ugly compile-time check, but we only have the preprocessor.
43    // Last +4: implicit return address pushed on stack when caller made call.
44#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 3*4 + 16 + 4)
45#error "SAVE_ALL_CALLEE_SAVE_FRAME(X86) size not as expected."
46#endif
47END_MACRO
48
49    /*
50     * Macro that sets up the callee save frame to conform with
51     * Runtime::CreateCalleeSaveMethod(kRefsOnly)
52     */
53MACRO2(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME, got_reg, temp_reg)
54    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
55    PUSH esi
56    PUSH ebp
57    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
58    CFI_ADJUST_CFA_OFFSET(12)
59    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
60    // Load Runtime::instance_ from GOT.
61    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
62    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
63    // Push save all callee-save method.
64    pushl RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
65    CFI_ADJUST_CFA_OFFSET(4)
66    // Store esp as the top quick frame.
67    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
68
69    // Ugly compile-time check, but we only have the preprocessor.
70    // Last +4: implicit return address pushed on stack when caller made call.
71#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 3*4 + 16 + 4)
72#error "REFS_ONLY_CALLEE_SAVE_FRAME(X86) size not as expected."
73#endif
74END_MACRO
75
76    /*
77     * Macro that sets up the callee save frame to conform with
78     * Runtime::CreateCalleeSaveMethod(kRefsOnly)
79     * and preserves the value of got_reg at entry.
80     */
81MACRO2(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME_PRESERVE_GOT_REG, got_reg, temp_reg)
82    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
83    PUSH esi
84    PUSH ebp
85    pushl REG_VAR(got_reg)  // Save got_reg
86    subl MACRO_LITERAL(8), %esp  // Grow stack by 2 words.
87    CFI_ADJUST_CFA_OFFSET(8)
88
89    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
90    // Load Runtime::instance_ from GOT.
91    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
92    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
93    // Push save all callee-save method.
94    pushl RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
95    CFI_ADJUST_CFA_OFFSET(4)
96    // Store esp as the top quick frame.
97    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
98    // Restore got_reg.
99    movl 12(%esp), REG_VAR(got_reg)
100
101    // Ugly compile-time check, but we only have the preprocessor.
102    // Last +4: implicit return address pushed on stack when caller made call.
103#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 3*4 + 16 + 4)
104#error "REFS_ONLY_CALLEE_SAVE_FRAME(X86) size not as expected."
105#endif
106END_MACRO
107
108MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME)
109    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values
110    CFI_ADJUST_CFA_OFFSET(-16)
111    POP ebp  // Restore callee saves (ebx is saved/restored by the upcall)
112    POP esi
113    POP edi
114END_MACRO
115
116    /*
117     * Macro that sets up the callee save frame to conform with
118     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs)
119     */
120MACRO2(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME, got_reg, temp_reg)
121    PUSH edi  // Save callee saves
122    PUSH esi
123    PUSH ebp
124    PUSH ebx  // Save args
125    PUSH edx
126    PUSH ecx
127    // Create space for FPR args.
128    subl MACRO_LITERAL(4 * 8), %esp
129    CFI_ADJUST_CFA_OFFSET(4 * 8)
130    // Save FPRs.
131    movsd %xmm0, 0(%esp)
132    movsd %xmm1, 8(%esp)
133    movsd %xmm2, 16(%esp)
134    movsd %xmm3, 24(%esp)
135
136    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
137    // Load Runtime::instance_ from GOT.
138    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
139    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
140    // Push save all callee-save method.
141    pushl RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg))
142    CFI_ADJUST_CFA_OFFSET(4)
143    // Store esp as the stop quick frame.
144    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
145
146    // Ugly compile-time check, but we only have the preprocessor.
147    // Last +4: implicit return address pushed on stack when caller made call.
148#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 7*4 + 4*8 + 4)
149#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(X86) size not as expected."
150#endif
151END_MACRO
152
153    /*
154     * Macro that sets up the callee save frame to conform with
155     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs) where the method is passed in EAX.
156     */
157MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX)
158    // Save callee and GPR args, mixed together to agree with core spills bitmap.
159    PUSH edi  // Save callee saves
160    PUSH esi
161    PUSH ebp
162    PUSH ebx  // Save args
163    PUSH edx
164    PUSH ecx
165
166    // Create space for FPR args.
167    subl MACRO_LITERAL(32), %esp
168    CFI_ADJUST_CFA_OFFSET(32)
169
170    // Save FPRs.
171    movsd %xmm0, 0(%esp)
172    movsd %xmm1, 8(%esp)
173    movsd %xmm2, 16(%esp)
174    movsd %xmm3, 24(%esp)
175
176    PUSH eax  // Store the ArtMethod reference at the bottom of the stack.
177    // Store esp as the stop quick frame.
178    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
179END_MACRO
180
181MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME)
182    // Restore FPRs. EAX is still on the stack.
183    movsd 4(%esp), %xmm0
184    movsd 12(%esp), %xmm1
185    movsd 20(%esp), %xmm2
186    movsd 28(%esp), %xmm3
187
188    addl MACRO_LITERAL(36), %esp  // Remove FPRs and EAX.
189    CFI_ADJUST_CFA_OFFSET(-36)
190
191    POP ecx                       // Restore args except eax
192    POP edx
193    POP ebx
194    POP ebp                       // Restore callee saves
195    POP esi
196    POP edi
197END_MACRO
198
199// Restore register and jump to routine
200// Inputs:  EDI contains pointer to code.
201// Notes: Need to pop EAX too (restores Method*)
202MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP)
203    POP eax  // Restore Method*
204
205    // Restore FPRs.
206    movsd 0(%esp), %xmm0
207    movsd 8(%esp), %xmm1
208    movsd 16(%esp), %xmm2
209    movsd 24(%esp), %xmm3
210
211    addl MACRO_LITERAL(32), %esp  // Remove FPRs.
212    CFI_ADJUST_CFA_OFFSET(-32)
213
214    POP ecx  // Restore args except eax
215    POP edx
216    POP ebx
217    POP ebp  // Restore callee saves
218    POP esi
219    xchgl 0(%esp),%edi // restore EDI and place code pointer as only value on stack
220    ret
221END_MACRO
222
223    /*
224     * Macro that set calls through to artDeliverPendingExceptionFromCode, where the pending
225     * exception is Thread::Current()->exception_.
226     */
227MACRO0(DELIVER_PENDING_EXCEPTION)
228    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save callee saves for throw
229    // Outgoing argument set up
230    subl MACRO_LITERAL(12), %esp              // Alignment padding
231    CFI_ADJUST_CFA_OFFSET(12)
232    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
233    CFI_ADJUST_CFA_OFFSET(4)
234    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*)
235    UNREACHABLE
236END_MACRO
237
238MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
239    DEFINE_FUNCTION VAR(c_name)
240    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  ebx, ebx  // save all registers as basis for long jump context
241    // Outgoing argument set up
242    subl MACRO_LITERAL(12), %esp                // alignment padding
243    CFI_ADJUST_CFA_OFFSET(12)
244    pushl %fs:THREAD_SELF_OFFSET                // pass Thread::Current()
245    CFI_ADJUST_CFA_OFFSET(4)
246    call CALLVAR(cxx_name)                      // cxx_name(Thread*)
247    UNREACHABLE
248    END_FUNCTION VAR(c_name)
249END_MACRO
250
251MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
252    DEFINE_FUNCTION VAR(c_name)
253    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save all registers as basis for long jump context
254    mov %esp, %ecx
255    // Outgoing argument set up
256    subl MACRO_LITERAL(8), %esp               // alignment padding
257    CFI_ADJUST_CFA_OFFSET(8)
258    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
259    CFI_ADJUST_CFA_OFFSET(4)
260    PUSH eax                                   // pass arg1
261    call CALLVAR(cxx_name)                     // cxx_name(arg1, Thread*)
262    UNREACHABLE
263    END_FUNCTION VAR(c_name)
264END_MACRO
265
266MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
267    DEFINE_FUNCTION VAR(c_name)
268    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx  // save all registers as basis for long jump context
269    // Outgoing argument set up
270    PUSH eax                                   // alignment padding
271    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
272    CFI_ADJUST_CFA_OFFSET(4)
273    PUSH ecx                                   // pass arg2
274    PUSH eax                                   // pass arg1
275    call CALLVAR(cxx_name)                     // cxx_name(arg1, arg2, Thread*)
276    UNREACHABLE
277    END_FUNCTION VAR(c_name)
278END_MACRO
279
280    /*
281     * Called by managed code to create and deliver a NullPointerException.
282     */
283NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
284
285    /*
286     * Called by managed code to create and deliver an ArithmeticException.
287     */
288NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode
289
290    /*
291     * Called by managed code to create and deliver a StackOverflowError.
292     */
293NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
294
295    /*
296     * Called by managed code, saves callee saves and then calls artThrowException
297     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
298     */
299ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
300
301    /*
302     * Called by managed code to create and deliver a NoSuchMethodError.
303     */
304ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode
305
306    /*
307     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
308     * index, arg2 holds limit.
309     */
310TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
311
312    /*
313     * All generated callsites for interface invokes and invocation slow paths will load arguments
314     * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
315     * the method_idx.  This wrapper will save arg1-arg3 and call the appropriate C helper.
316     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1.
317     *
318     * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
319     * of the target Method* in r0 and method->code_ in r1.
320     *
321     * If unsuccessful, the helper will return null/null will bea pending exception in the
322     * thread and we branch to another stub to deliver it.
323     *
324     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
325     * pointing back to the original caller.
326     */
327MACRO1(INVOKE_TRAMPOLINE_BODY, cxx_name)
328    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx
329    movl %esp, %edx  // remember SP
330
331    // Outgoing argument set up
332    PUSH edx                      // pass SP
333    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
334    CFI_ADJUST_CFA_OFFSET(4)
335    PUSH ecx                      // pass arg2
336    PUSH eax                      // pass arg1
337    call CALLVAR(cxx_name)        // cxx_name(arg1, arg2, Thread*, SP)
338    movl %edx, %edi               // save code pointer in EDI
339    addl MACRO_LITERAL(20), %esp  // Pop arguments skip eax
340    CFI_ADJUST_CFA_OFFSET(-20)
341
342    // Restore FPRs.
343    movsd 0(%esp), %xmm0
344    movsd 8(%esp), %xmm1
345    movsd 16(%esp), %xmm2
346    movsd 24(%esp), %xmm3
347
348    // Remove space for FPR args.
349    addl MACRO_LITERAL(4 * 8), %esp
350    CFI_ADJUST_CFA_OFFSET(-4 * 8)
351
352    POP ecx  // Restore args except eax
353    POP edx
354    POP ebx
355    POP ebp  // Restore callee saves
356    POP esi
357    // Swap EDI callee save with code pointer.
358    xchgl %edi, (%esp)
359    testl %eax, %eax              // Branch forward if exception pending.
360    jz    1f
361    // Tail call to intended method.
362    ret
3631:
364    addl MACRO_LITERAL(4), %esp   // Pop code pointer off stack
365    CFI_ADJUST_CFA_OFFSET(-4)
366    DELIVER_PENDING_EXCEPTION
367END_MACRO
368MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
369    DEFINE_FUNCTION VAR(c_name)
370    INVOKE_TRAMPOLINE_BODY RAW_VAR(cxx_name)
371    END_FUNCTION VAR(c_name)
372END_MACRO
373
374INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck
375
376INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
377INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
378INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
379INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
380
381    /*
382     * Helper for quick invocation stub to set up XMM registers.
383     * Increments shorty and arg_array and clobbers temp_char.
384     * Branches to finished if it encounters the end of the shorty.
385     */
386MACRO5(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, shorty, arg_array, temp_char, finished)
3871: // LOOP
388    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
389    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
390    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
391    je VAR(finished)                               //   goto finished
392    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
393    je 2f                                          //   goto FOUND_DOUBLE
394    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
395    je 3f                                          //   goto FOUND_FLOAT
396    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
397    //  Handle extra space in arg array taken by a long.
398    cmpb MACRO_LITERAL(74), REG_VAR(temp_char)     // if (temp_char != 'J')
399    jne 1b                                         //   goto LOOP
400    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
401    jmp 1b                                         // goto LOOP
4022:  // FOUND_DOUBLE
403    movsd (REG_VAR(arg_array)), REG_VAR(xmm_reg)
404    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
405    jmp 4f
4063:  // FOUND_FLOAT
407    movss (REG_VAR(arg_array)), REG_VAR(xmm_reg)
408    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
4094:
410END_MACRO
411
412    /*
413     * Helper for quick invocation stub to set up GPR registers.
414     * Increments shorty and arg_array, and returns the current short character in
415     * temp_char. Branches to finished if it encounters the end of the shorty.
416     */
417MACRO4(SKIP_OVER_FLOATS, shorty, arg_array, temp_char, finished)
4181: // LOOP:
419    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
420    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
421    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
422    je VAR(finished)                               //   goto finished
423    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
424    je 3f                                          //   goto SKIP_FLOAT
425    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
426    je 4f                                          //   goto SKIP_DOUBLE
427    jmp 5f                                         // goto end
4283:  // SKIP_FLOAT
429    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
430    jmp 1b                                         // goto LOOP
4314:  // SKIP_DOUBLE
432    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
433    jmp 1b                                         // goto LOOP
4345:
435END_MACRO
436
437  /*
438     * Quick invocation stub (non-static).
439     * On entry:
440     *   [sp] = return address
441     *   [sp + 4] = method pointer
442     *   [sp + 8] = argument array or null for no argument methods
443     *   [sp + 12] = size of argument array in bytes
444     *   [sp + 16] = (managed) thread pointer
445     *   [sp + 20] = JValue* result
446     *   [sp + 24] = shorty
447     */
448DEFINE_FUNCTION art_quick_invoke_stub
449    // Save the non-volatiles.
450    PUSH ebp                      // save ebp
451    PUSH ebx                      // save ebx
452    PUSH esi                      // save esi
453    PUSH edi                      // save edi
454    // Set up argument XMM registers.
455    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
456    addl LITERAL(1), %esi
457    mov 8+16(%esp), %edi          // EDI := arg_array + 4 ; ie skip this pointer.
458    addl LITERAL(4), %edi
459    // Clobbers ESI, EDI, EAX.
460    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished
461    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished
462    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished
463    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished
464    .balign 16
465.Lxmm_setup_finished:
466    mov %esp, %ebp                // copy value of stack pointer into base pointer
467    CFI_DEF_CFA_REGISTER(ebp)
468    mov 28(%ebp), %ebx            // get arg array size
469    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
470    addl LITERAL(36), %ebx
471    // align frame size to 16 bytes
472    andl LITERAL(0xFFFFFFF0), %ebx
473    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
474    subl %ebx, %esp               // reserve stack space for argument array
475
476    movl LITERAL(0), (%esp)       // store null for method*
477
478    // Copy arg array into stack.
479    movl 28(%ebp), %ecx           // ECX = size of args
480    movl 24(%ebp), %esi           // ESI = argument array
481    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
482    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
483
484    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
485    addl LITERAL(1), %esi
486    mov 24(%ebp), %edi            // EDI := arg_array
487    mov 0(%edi), %ecx             // ECX := this pointer
488    addl LITERAL(4), %edi         // EDI := arg_array + 4 ; ie skip this pointer.
489
490    // Enumerate the possible cases for loading GPRS.
491    // edx (and maybe ebx):
492    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
493    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
494    je .LfirstLong
495    // Must be an integer value.
496    movl (%edi), %edx
497    addl LITERAL(4), %edi         // arg_array++
498
499    // Now check ebx
500    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
501    // Must be first word of a long, or an integer. First word of long doesn't
502    // go into EBX, but can be loaded there anyways, as it is harmless.
503    movl (%edi), %ebx
504    jmp .Lgpr_setup_finished
505.LfirstLong:
506    movl (%edi), %edx
507    movl 4(%edi), %ebx
508    // Nothing left to load.
509.Lgpr_setup_finished:
510    mov 20(%ebp), %eax            // move method pointer into eax
511    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
512    mov %ebp, %esp                // restore stack pointer
513    CFI_DEF_CFA_REGISTER(esp)
514    POP edi                       // pop edi
515    POP esi                       // pop esi
516    POP ebx                       // pop ebx
517    POP ebp                       // pop ebp
518    mov 20(%esp), %ecx            // get result pointer
519    mov %eax, (%ecx)              // store the result assuming its a long, int or Object*
520    mov %edx, 4(%ecx)             // store the other half of the result
521    mov 24(%esp), %edx            // get the shorty
522    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
523    je .Lreturn_double_quick
524    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
525    je .Lreturn_float_quick
526    ret
527.Lreturn_double_quick:
528    movsd %xmm0, (%ecx)           // store the floating point result
529    ret
530.Lreturn_float_quick:
531    movss %xmm0, (%ecx)           // store the floating point result
532    ret
533END_FUNCTION art_quick_invoke_stub
534
535  /*
536     * Quick invocation stub (static).
537     * On entry:
538     *   [sp] = return address
539     *   [sp + 4] = method pointer
540     *   [sp + 8] = argument array or null for no argument methods
541     *   [sp + 12] = size of argument array in bytes
542     *   [sp + 16] = (managed) thread pointer
543     *   [sp + 20] = JValue* result
544     *   [sp + 24] = shorty
545     */
546DEFINE_FUNCTION art_quick_invoke_static_stub
547    // Save the non-volatiles.
548    PUSH ebp                      // save ebp
549    PUSH ebx                      // save ebx
550    PUSH esi                      // save esi
551    PUSH edi                      // save edi
552    // Set up argument XMM registers.
553    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
554    addl LITERAL(1), %esi
555    mov 8+16(%esp), %edi          // EDI := arg_array
556    // Clobbers ESI, EDI, EAX.
557    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished2
558    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished2
559    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished2
560    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished2
561    .balign 16
562.Lxmm_setup_finished2:
563    mov %esp, %ebp                // copy value of stack pointer into base pointer
564    CFI_DEF_CFA_REGISTER(ebp)
565    mov 28(%ebp), %ebx            // get arg array size
566    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
567    addl LITERAL(36), %ebx
568    // align frame size to 16 bytes
569    andl LITERAL(0xFFFFFFF0), %ebx
570    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
571    subl %ebx, %esp               // reserve stack space for argument array
572
573    movl LITERAL(0), (%esp)       // store null for method*
574
575    // Copy arg array into stack.
576    movl 28(%ebp), %ecx           // ECX = size of args
577    movl 24(%ebp), %esi           // ESI = argument array
578    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
579    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
580
581    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
582    addl LITERAL(1), %esi
583    mov 24(%ebp), %edi            // EDI := arg_array
584
585    // Enumerate the possible cases for loading GPRS.
586    // ecx (and maybe edx)
587    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
588    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
589    je .LfirstLong2
590    // Must be an integer value.  Load into ECX.
591    movl (%edi), %ecx
592    addl LITERAL(4), %edi         // arg_array++
593
594    // Now check edx (and maybe ebx).
595    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
596    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
597    je .LSecondLong2
598    // Must be an integer.  Load into EDX.
599    movl (%edi), %edx
600    addl LITERAL(4), %edi         // arg_array++
601
602    // Is there anything for ebx?
603    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
604    // Must be first word of a long, or an integer. First word of long doesn't
605    // go into EBX, but can be loaded there anyways, as it is harmless.
606    movl (%edi), %ebx
607    jmp .Lgpr_setup_finished2
608.LSecondLong2:
609    // EDX:EBX is long.  That is all.
610    movl (%edi), %edx
611    movl 4(%edi), %ebx
612    jmp .Lgpr_setup_finished2
613.LfirstLong2:
614    // ECX:EDX is a long
615    movl (%edi), %ecx
616    movl 4(%edi), %edx
617    addl LITERAL(8), %edi         // arg_array += 2
618
619    // Anything for EBX?
620    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
621    // Must be first word of a long, or an integer. First word of long doesn't
622    // go into EBX, but can be loaded there anyways, as it is harmless.
623    movl (%edi), %ebx
624    jmp .Lgpr_setup_finished2
625    // Nothing left to load.
626.Lgpr_setup_finished2:
627    mov 20(%ebp), %eax            // move method pointer into eax
628    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
629    mov %ebp, %esp                // restore stack pointer
630    CFI_DEF_CFA_REGISTER(esp)
631    POP edi                       // pop edi
632    POP esi                       // pop esi
633    POP ebx                       // pop ebx
634    POP ebp                       // pop ebp
635    mov 20(%esp), %ecx            // get result pointer
636    mov %eax, (%ecx)              // store the result assuming its a long, int or Object*
637    mov %edx, 4(%ecx)             // store the other half of the result
638    mov 24(%esp), %edx            // get the shorty
639    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
640    je .Lreturn_double_quick2
641    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
642    je .Lreturn_float_quick2
643    ret
644.Lreturn_double_quick2:
645    movsd %xmm0, (%ecx)           // store the floating point result
646    ret
647.Lreturn_float_quick2:
648    movss %xmm0, (%ecx)           // store the floating point result
649    ret
650END_FUNCTION art_quick_invoke_static_stub
651
652MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
653    DEFINE_FUNCTION VAR(c_name)
654    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
655    // Outgoing argument set up
656    subl MACRO_LITERAL(12), %esp                // push padding
657    CFI_ADJUST_CFA_OFFSET(12)
658    pushl %fs:THREAD_SELF_OFFSET                // pass Thread::Current()
659    CFI_ADJUST_CFA_OFFSET(4)
660    call CALLVAR(cxx_name)                      // cxx_name(Thread*)
661    addl MACRO_LITERAL(16), %esp                // pop arguments
662    CFI_ADJUST_CFA_OFFSET(-16)
663    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME         // restore frame up to return address
664    CALL_MACRO(return_macro)                    // return or deliver exception
665    END_FUNCTION VAR(c_name)
666END_MACRO
667
668MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
669    DEFINE_FUNCTION VAR(c_name)
670    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
671    // Outgoing argument set up
672    subl MACRO_LITERAL(8), %esp                  // push padding
673    CFI_ADJUST_CFA_OFFSET(8)
674    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
675    CFI_ADJUST_CFA_OFFSET(4)
676    PUSH eax                                     // pass arg1
677    call CALLVAR(cxx_name)                       // cxx_name(arg1, Thread*)
678    addl MACRO_LITERAL(16), %esp                 // pop arguments
679    CFI_ADJUST_CFA_OFFSET(-16)
680    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
681    CALL_MACRO(return_macro)                     // return or deliver exception
682    END_FUNCTION VAR(c_name)
683END_MACRO
684
685MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
686    DEFINE_FUNCTION VAR(c_name)
687    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
688    // Outgoing argument set up
689    PUSH eax                                     // push padding
690    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
691    CFI_ADJUST_CFA_OFFSET(4)
692    PUSH ecx                                     // pass arg2
693    PUSH eax                                     // pass arg1
694    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, Thread*)
695    addl MACRO_LITERAL(16), %esp                 // pop arguments
696    CFI_ADJUST_CFA_OFFSET(-16)
697    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
698    CALL_MACRO(return_macro)                     // return or deliver exception
699    END_FUNCTION VAR(c_name)
700END_MACRO
701
702MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
703    DEFINE_FUNCTION VAR(c_name)
704    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
705    // Outgoing argument set up
706    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
707    CFI_ADJUST_CFA_OFFSET(4)
708    PUSH edx                                     // pass arg3
709    PUSH ecx                                     // pass arg2
710    PUSH eax                                     // pass arg1
711    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, Thread*)
712    addl MACRO_LITERAL(16), %esp                 // pop arguments
713    CFI_ADJUST_CFA_OFFSET(-16)
714    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
715    CALL_MACRO(return_macro)                     // return or deliver exception
716    END_FUNCTION VAR(c_name)
717END_MACRO
718
719MACRO3(FOUR_ARG_DOWNCALL, c_name, cxx_name, return_macro)
720    DEFINE_FUNCTION VAR(c_name)
721    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME_PRESERVE_GOT_REG  ebx, ebx  // save ref containing registers for GC
722
723    // Outgoing argument set up
724    subl MACRO_LITERAL(12), %esp                 // alignment padding
725    CFI_ADJUST_CFA_OFFSET(12)
726    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
727    CFI_ADJUST_CFA_OFFSET(4)
728    PUSH ebx                                     // pass arg4
729    PUSH edx                                     // pass arg3
730    PUSH ecx                                     // pass arg2
731    PUSH eax                                     // pass arg1
732    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, arg4, Thread*)
733    addl MACRO_LITERAL(32), %esp                 // pop arguments
734    CFI_ADJUST_CFA_OFFSET(-32)
735    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // restore frame up to return address
736    CALL_MACRO(return_macro)                     // return or deliver exception
737    END_FUNCTION VAR(c_name)
738END_MACRO
739
740MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
741    DEFINE_FUNCTION VAR(c_name)
742    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx       // save ref containing registers for GC
743    // Outgoing argument set up
744    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ecx  // get referrer
745    PUSH eax                                          // push padding
746    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
747    CFI_ADJUST_CFA_OFFSET(4)
748    PUSH ecx                                          // pass referrer
749    PUSH eax                                          // pass arg1
750    call CALLVAR(cxx_name)                            // cxx_name(arg1, referrer, Thread*)
751    addl MACRO_LITERAL(16), %esp                      // pop arguments
752    CFI_ADJUST_CFA_OFFSET(-16)
753    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME               // restore frame up to return address
754    CALL_MACRO(return_macro)                          // return or deliver exception
755    END_FUNCTION VAR(c_name)
756END_MACRO
757
758MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
759    DEFINE_FUNCTION VAR(c_name)
760    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx        // save ref containing registers for GC
761    // Outgoing argument set up
762    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %edx  // get referrer
763    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
764    CFI_ADJUST_CFA_OFFSET(4)
765    PUSH edx                                          // pass referrer
766    PUSH ecx                                          // pass arg2
767    PUSH eax                                          // pass arg1
768    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, referrer, Thread*)
769    addl MACRO_LITERAL(16), %esp                      // pop arguments
770    CFI_ADJUST_CFA_OFFSET(-16)
771    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME               // restore frame up to return address
772    CALL_MACRO(return_macro)                          // return or deliver exception
773    END_FUNCTION VAR(c_name)
774END_MACRO
775
776MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
777    DEFINE_FUNCTION VAR(c_name)
778    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx        // save ref containing registers for GC
779    // Outgoing argument set up
780    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx  // get referrer
781    subl MACRO_LITERAL(12), %esp                      // alignment padding
782    CFI_ADJUST_CFA_OFFSET(12)
783    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
784    CFI_ADJUST_CFA_OFFSET(4)
785    PUSH ebx                                          // pass referrer
786    PUSH edx                                          // pass arg3
787    PUSH ecx                                          // pass arg2
788    PUSH eax                                          // pass arg1
789    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, arg3, referrer,
790                                                      //          Thread*)
791    addl LITERAL(32), %esp                            // pop arguments
792    CFI_ADJUST_CFA_OFFSET(-32)
793    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME               // restore frame up to return address
794    CALL_MACRO(return_macro)                          // return or deliver exception
795    END_FUNCTION VAR(c_name)
796END_MACRO
797
798MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
799    testl %eax, %eax               // eax == 0 ?
800    jz  1f                         // if eax == 0 goto 1
801    ret                            // return
8021:                                 // deliver exception on current thread
803    DELIVER_PENDING_EXCEPTION
804END_MACRO
805
806MACRO0(RETURN_IF_EAX_ZERO)
807    testl %eax, %eax               // eax == 0 ?
808    jnz  1f                        // if eax != 0 goto 1
809    ret                            // return
8101:                                 // deliver exception on current thread
811    DELIVER_PENDING_EXCEPTION
812END_MACRO
813
814MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
815    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
816    jne 1f                                            // if exception field != 0 goto 1
817    ret                                               // return
8181:                                                    // deliver exception on current thread
819    DELIVER_PENDING_EXCEPTION
820END_MACRO
821
822// Generate the allocation entrypoints for each allocator.
823GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR
824
825// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc).
826DEFINE_FUNCTION art_quick_alloc_object_rosalloc
827    // Fast path rosalloc allocation.
828    // eax: uint32_t type_idx/return value, ecx: ArtMethod*
829    // ebx, edx: free
830    PUSH edi
831    movl ART_METHOD_DEX_CACHE_TYPES_OFFSET_32(%ecx), %edx  // Load dex cache resolved types array
832                                                        // Load the class (edx)
833    movl 0(%edx, %eax, COMPRESSED_REFERENCE_SIZE), %edx
834    testl %edx, %edx                                    // Check null class
835    jz   .Lart_quick_alloc_object_rosalloc_slow_path
836                                                        // Check class status
837    cmpl LITERAL(MIRROR_CLASS_STATUS_INITIALIZED), MIRROR_CLASS_STATUS_OFFSET(%edx)
838    jne  .Lart_quick_alloc_object_rosalloc_slow_path
839                                                        // No fake dependence needed on x86
840                                                        // between status and flags load,
841                                                        // since each load is a load-acquire,
842                                                        // no loads reordering.
843                                                        // Check access flags has
844                                                        // kAccClassIsFinalizable
845    testl LITERAL(ACCESS_FLAGS_CLASS_IS_FINALIZABLE), MIRROR_CLASS_ACCESS_FLAGS_OFFSET(%edx)
846    jnz   .Lart_quick_alloc_object_rosalloc_slow_path
847
848    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
849                                                        // Check if the thread local allocation
850                                                        // stack has room
851    movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %edi
852    cmpl THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%ebx), %edi
853    jae  .Lart_quick_alloc_object_rosalloc_slow_path
854
855    movl MIRROR_CLASS_OBJECT_SIZE_OFFSET(%edx), %edi    // Load the object size (edi)
856                                                        // Check if the size is for a thread
857                                                        // local allocation
858    cmpl LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %edi
859    ja   .Lart_quick_alloc_object_rosalloc_slow_path
860    decl %edi
861    shrl LITERAL(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), %edi // Calculate the rosalloc bracket index
862                                                            // from object size.
863                                                            // Align up the size by the rosalloc
864                                                            // bracket quantum size and divide
865                                                            // by the quantum size and subtract
866                                                            // by 1. This code is a shorter but
867                                                            // equivalent version.
868                                                        // Load thread local rosalloc run (ebx)
869    movl THREAD_ROSALLOC_RUNS_OFFSET(%ebx, %edi, __SIZEOF_POINTER__), %ebx
870                                                        // Load free_list head (edi),
871                                                        // this will be the return value.
872    movl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx), %edi
873    test %edi, %edi
874    jz   .Lart_quick_alloc_object_rosalloc_slow_path
875                                                        // Point of no slow path. Won't go to
876                                                        // the slow path from here on. Ok to
877                                                        // clobber eax and ecx.
878    movl %edi, %eax
879                                                        // Load the next pointer of the head
880                                                        // and update head of free list with
881                                                        // next pointer
882    movl ROSALLOC_SLOT_NEXT_OFFSET(%eax), %edi
883    movl %edi, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx)
884                                                        // Decrement size of free list by 1
885    decl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)(%ebx)
886                                                        // Store the class pointer in the
887                                                        // header. This also overwrites the
888                                                        // next pointer. The offsets are
889                                                        // asserted to match.
890#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
891#error "Class pointer needs to overwrite next pointer."
892#endif
893    POISON_HEAP_REF edx
894    movl %edx, MIRROR_OBJECT_CLASS_OFFSET(%eax)
895    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
896                                                        // Push the new object onto the thread
897                                                        // local allocation stack and
898                                                        // increment the thread local
899                                                        // allocation stack top.
900    movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %edi
901    movl %eax, (%edi)
902    addl LITERAL(COMPRESSED_REFERENCE_SIZE), %edi
903    movl %edi, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx)
904                                                        // No fence needed for x86.
905    POP edi
906    ret
907.Lart_quick_alloc_object_rosalloc_slow_path:
908    POP edi
909    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC
910    // Outgoing argument set up
911    PUSH eax                      // alignment padding
912    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
913    CFI_ADJUST_CFA_OFFSET(4)
914    PUSH ecx
915    PUSH eax
916    call SYMBOL(artAllocObjectFromCodeRosAlloc)  // cxx_name(arg0, arg1, Thread*)
917    addl LITERAL(16), %esp        // pop arguments
918    CFI_ADJUST_CFA_OFFSET(-16)
919    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME          // resotre frame up to return address
920    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER      // return or deliver exception
921END_FUNCTION art_quick_alloc_object_rosalloc
922
923GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB)
924GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB)
925
926ONE_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
927ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
928ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
929ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
930
931TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
932
933DEFINE_FUNCTION art_quick_lock_object
934    testl %eax, %eax                      // null check object/eax
935    jz   .Lslow_lock
936.Lretry_lock:
937    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
938    test LITERAL(LOCK_WORD_STATE_MASK), %ecx         // test the 2 high bits.
939    jne  .Lslow_lock                      // slow path if either of the two high bits are set.
940    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
941    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
942    test %ecx, %ecx
943    jnz  .Lalready_thin                   // lock word contains a thin lock
944    // unlocked case - edx: original lock word, eax: obj.
945    movl %eax, %ecx                       // remember object in case of retry
946    movl %edx, %eax                       // eax: lock word zero except for read barrier bits.
947    movl %fs:THREAD_ID_OFFSET, %edx       // load thread id.
948    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
949    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
950    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
951    ret
952.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), eax: obj.
953    movl %fs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
954    cmpw %cx, %dx                         // do we hold the lock already?
955    jne  .Lslow_lock
956    movl %edx, %ecx                       // copy the lock word to check count overflow.
957    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
958    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count for overflow check.
959    test LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // overflowed if either of the upper two bits (28-29) are set.
960    jne  .Lslow_lock                      // count overflowed so go slow
961    movl %eax, %ecx                       // save obj to use eax for cmpxchg.
962    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
963    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
964    // update lockword, cmpxchg necessary for read barrier bits.
965    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
966    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
967    ret
968.Llock_cmpxchg_fail:
969    movl  %ecx, %eax                      // restore eax
970    jmp  .Lretry_lock
971.Lslow_lock:
972    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
973    // Outgoing argument set up
974    subl LITERAL(8), %esp                 // alignment padding
975    CFI_ADJUST_CFA_OFFSET(8)
976    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
977    CFI_ADJUST_CFA_OFFSET(4)
978    PUSH eax                              // pass object
979    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
980    addl LITERAL(16), %esp                // pop arguments
981    CFI_ADJUST_CFA_OFFSET(-16)
982    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
983    RETURN_IF_EAX_ZERO
984END_FUNCTION art_quick_lock_object
985
986DEFINE_FUNCTION art_quick_lock_object_no_inline
987    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
988    // Outgoing argument set up
989    subl LITERAL(8), %esp                 // alignment padding
990    CFI_ADJUST_CFA_OFFSET(8)
991    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
992    CFI_ADJUST_CFA_OFFSET(4)
993    PUSH eax                              // pass object
994    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
995    addl LITERAL(16), %esp                // pop arguments
996    CFI_ADJUST_CFA_OFFSET(-16)
997    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
998    RETURN_IF_EAX_ZERO
999END_FUNCTION art_quick_lock_object_no_inline
1000
1001
1002DEFINE_FUNCTION art_quick_unlock_object
1003    testl %eax, %eax                      // null check object/eax
1004    jz   .Lslow_unlock
1005.Lretry_unlock:
1006    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
1007    movl %fs:THREAD_ID_OFFSET, %edx       // edx := thread id
1008    test LITERAL(LOCK_WORD_STATE_MASK), %ecx
1009    jnz  .Lslow_unlock                    // lock word contains a monitor
1010    cmpw %cx, %dx                         // does the thread id match?
1011    jne  .Lslow_unlock
1012    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
1013    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %edx  // zero the read barrier bits.
1014    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx
1015    jae  .Lrecursive_thin_unlock
1016    // update lockword, cmpxchg necessary for read barrier bits.
1017    movl %eax, %edx                       // edx: obj
1018    movl %ecx, %eax                       // eax: old lock word.
1019    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // ecx: new lock word zero except original rb bits.
1020#ifndef USE_READ_BARRIER
1021    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
1022#else
1023    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
1024    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
1025#endif
1026    ret
1027.Lrecursive_thin_unlock:  // ecx: original lock word, eax: obj
1028    // update lockword, cmpxchg necessary for read barrier bits.
1029    movl %eax, %edx                       // edx: obj
1030    movl %ecx, %eax                       // eax: old lock word.
1031    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // ecx: new lock word with decremented count.
1032#ifndef USE_READ_BARRIER
1033    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
1034#else
1035    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
1036    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
1037#endif
1038    ret
1039.Lunlock_cmpxchg_fail:  // edx: obj
1040    movl %edx, %eax                       // restore eax
1041    jmp  .Lretry_unlock
1042.Lslow_unlock:
1043    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
1044    // Outgoing argument set up
1045    subl LITERAL(8), %esp                 // alignment padding
1046    CFI_ADJUST_CFA_OFFSET(8)
1047    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
1048    CFI_ADJUST_CFA_OFFSET(4)
1049    PUSH eax                              // pass object
1050    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
1051    addl LITERAL(16), %esp                // pop arguments
1052    CFI_ADJUST_CFA_OFFSET(-16)
1053    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
1054    RETURN_IF_EAX_ZERO
1055END_FUNCTION art_quick_unlock_object
1056
1057DEFINE_FUNCTION art_quick_unlock_object_no_inline
1058    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
1059    // Outgoing argument set up
1060    subl LITERAL(8), %esp                 // alignment padding
1061    CFI_ADJUST_CFA_OFFSET(8)
1062    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
1063    CFI_ADJUST_CFA_OFFSET(4)
1064    PUSH eax                              // pass object
1065    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
1066    addl LITERAL(16), %esp                // pop arguments
1067    CFI_ADJUST_CFA_OFFSET(-16)
1068    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
1069    RETURN_IF_EAX_ZERO
1070END_FUNCTION art_quick_unlock_object_no_inline
1071
1072DEFINE_FUNCTION art_quick_is_assignable
1073    PUSH eax                              // alignment padding
1074    PUSH ecx                              // pass arg2 - obj->klass
1075    PUSH eax                              // pass arg1 - checked class
1076    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
1077    addl LITERAL(12), %esp                // pop arguments
1078    CFI_ADJUST_CFA_OFFSET(-12)
1079    ret
1080END_FUNCTION art_quick_is_assignable
1081
1082DEFINE_FUNCTION art_quick_check_cast
1083    PUSH eax                              // alignment padding
1084    PUSH ecx                              // pass arg2 - obj->klass
1085    PUSH eax                              // pass arg1 - checked class
1086    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
1087    testl %eax, %eax
1088    jz 1f                                 // jump forward if not assignable
1089    addl LITERAL(12), %esp                // pop arguments
1090    CFI_ADJUST_CFA_OFFSET(-12)
1091    ret
1092
1093    CFI_ADJUST_CFA_OFFSET(12)             // Reset unwind info so following code unwinds.
10941:
1095    POP eax                               // pop arguments
1096    POP ecx
1097    addl LITERAL(4), %esp
1098    CFI_ADJUST_CFA_OFFSET(-4)
1099    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  ebx, ebx  // save all registers as basis for long jump context
1100    // Outgoing argument set up
1101    PUSH eax                              // alignment padding
1102    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
1103    CFI_ADJUST_CFA_OFFSET(4)
1104    PUSH ecx                              // pass arg2
1105    PUSH eax                              // pass arg1
1106    call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*)
1107    UNREACHABLE
1108END_FUNCTION art_quick_check_cast
1109
1110// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
1111MACRO2(POP_REG_NE, reg, exclude_reg)
1112    .ifc RAW_VAR(reg), RAW_VAR(exclude_reg)
1113      addl MACRO_LITERAL(4), %esp
1114      CFI_ADJUST_CFA_OFFSET(-4)
1115    .else
1116      POP RAW_VAR(reg)
1117    .endif
1118END_MACRO
1119
1120    /*
1121     * Macro to insert read barrier, only used in art_quick_aput_obj.
1122     * obj_reg and dest_reg are registers, offset is a defined literal such as
1123     * MIRROR_OBJECT_CLASS_OFFSET.
1124     * pop_eax is a boolean flag, indicating if eax is popped after the call.
1125     * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
1126     */
1127MACRO4(READ_BARRIER, obj_reg, offset, dest_reg, pop_eax)
1128#ifdef USE_READ_BARRIER
1129    PUSH eax                        // save registers used in art_quick_aput_obj
1130    PUSH ebx
1131    PUSH edx
1132    PUSH ecx
1133    // Outgoing argument set up
1134    pushl MACRO_LITERAL((RAW_VAR(offset)))  // pass offset, double parentheses are necessary
1135    CFI_ADJUST_CFA_OFFSET(4)
1136    PUSH RAW_VAR(obj_reg)           // pass obj_reg
1137    PUSH eax                        // pass ref, just pass eax for now since parameter ref is unused
1138    call SYMBOL(artReadBarrierSlow) // artReadBarrierSlow(ref, obj_reg, offset)
1139    // No need to unpoison return value in eax, artReadBarrierSlow() would do the unpoisoning.
1140    .ifnc RAW_VAR(dest_reg), eax
1141      movl %eax, REG_VAR(dest_reg)  // save loaded ref in dest_reg
1142    .endif
1143    addl MACRO_LITERAL(12), %esp    // pop arguments
1144    CFI_ADJUST_CFA_OFFSET(-12)
1145    POP_REG_NE ecx, RAW_VAR(dest_reg) // Restore args except dest_reg
1146    POP_REG_NE edx, RAW_VAR(dest_reg)
1147    POP_REG_NE ebx, RAW_VAR(dest_reg)
1148    .ifc RAW_VAR(pop_eax), true
1149      POP_REG_NE eax, RAW_VAR(dest_reg)
1150    .endif
1151#else
1152    movl RAW_VAR(offset)(REG_VAR(obj_reg)), REG_VAR(dest_reg)
1153    UNPOISON_HEAP_REF RAW_VAR(dest_reg)
1154#endif  // USE_READ_BARRIER
1155END_MACRO
1156
1157    /*
1158     * Entry from managed code for array put operations of objects where the value being stored
1159     * needs to be checked for compatibility.
1160     * eax = array, ecx = index, edx = value
1161     */
1162DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check
1163    testl %eax, %eax
1164    jnz SYMBOL(art_quick_aput_obj_with_bound_check)
1165    jmp SYMBOL(art_quick_throw_null_pointer_exception)
1166END_FUNCTION art_quick_aput_obj_with_null_and_bound_check
1167
1168DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
1169    movl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %ebx
1170    cmpl %ebx, %ecx
1171    jb SYMBOL(art_quick_aput_obj)
1172    mov %ecx, %eax               // pass offending index as arg1 of the bounds throw
1173    mov %ebx, %ecx               // pass array length as arg2
1174    jmp SYMBOL(art_quick_throw_array_bounds)
1175END_FUNCTION art_quick_aput_obj_with_bound_check
1176
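    /*
     * Informal C-like sketch of art_quick_aput_obj below (illustrative only; names such
     * as element_class and dirty are informal, card granularity is 128 bytes per the
     * shift of 7):
     *   if (value == null) { array[index] = null; return; }
     *   element_class = array->klass->component_type;
     *   if (value->klass == element_class ||
     *       artIsAssignableFromCode(element_class, value->klass)) {
     *     array[index] = value;                     // poisoned if heap poisoning is on
     *     card_table[uint32(array) >> 7] = dirty;   // mark the card covering the array
     *   } else {
     *     artThrowArrayStoreException(array, value, Thread::Current());
     *   }
     */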
1177DEFINE_FUNCTION art_quick_aput_obj
1178    test %edx, %edx              // store of null
1179    jz .Ldo_aput_null
1180    READ_BARRIER eax, MIRROR_OBJECT_CLASS_OFFSET, ebx, true
1181    READ_BARRIER ebx, MIRROR_CLASS_COMPONENT_TYPE_OFFSET, ebx, true
1182    // value's type == array's component type - trivial assignability
1183#if defined(USE_READ_BARRIER)
1184    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, false
1185    cmpl %eax, %ebx
1186    POP eax                      // restore eax from the push in the beginning of READ_BARRIER macro
1187    // This asymmetric push/pop saves a push of eax and maintains stack alignment.
1188#elif defined(USE_HEAP_POISONING)
1189    PUSH eax                     // save eax
1190    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
1191    UNPOISON_HEAP_REF eax
1192    cmpl %eax, %ebx
1193    POP eax                      // restore eax
1194#else
1195    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx
1196#endif
1197    jne .Lcheck_assignability
1198.Ldo_aput:
1199    POISON_HEAP_REF edx
1200    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
1201    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
1202    shrl LITERAL(7), %eax
1203    movb %dl, (%edx, %eax)
1204    ret
1205.Ldo_aput_null:
1206    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
1207    ret
1208.Lcheck_assignability:
1209    PUSH eax                      // save arguments
1210    PUSH ecx
1211    PUSH edx
1212#if defined(USE_READ_BARRIER)
1213    subl LITERAL(4), %esp         // alignment padding
1214    CFI_ADJUST_CFA_OFFSET(4)
1215    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, true
1216    subl LITERAL(4), %esp         // alignment padding
1217    CFI_ADJUST_CFA_OFFSET(4)
1218    PUSH eax                      // pass arg2 - type of the value to be stored
1219#elif defined(USE_HEAP_POISONING)
1220    subl LITERAL(8), %esp         // alignment padding
1221    CFI_ADJUST_CFA_OFFSET(8)
1222    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
1223    UNPOISON_HEAP_REF eax
1224    PUSH eax                      // pass arg2 - type of the value to be stored
1225#else
1226    subl LITERAL(8), %esp         // alignment padding
1227    CFI_ADJUST_CFA_OFFSET(8)
1228    pushl MIRROR_OBJECT_CLASS_OFFSET(%edx)  // pass arg2 - type of the value to be stored
1229    CFI_ADJUST_CFA_OFFSET(4)
1230#endif
1231    PUSH ebx                      // pass arg1 - component type of the array
1232    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
1233    addl LITERAL(16), %esp        // pop arguments
1234    CFI_ADJUST_CFA_OFFSET(-16)
1235    testl %eax, %eax
1236    jz   .Lthrow_array_store_exception
1237    POP  edx
1238    POP  ecx
1239    POP  eax
1240    POISON_HEAP_REF edx
1241    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)  // do the aput
1242    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
1243    shrl LITERAL(7), %eax
1244    movb %dl, (%edx, %eax)
1245    ret
1246    CFI_ADJUST_CFA_OFFSET(12)     // Reset unwind info: the 3 POPs above apply only to the fall-through path.
1247.Lthrow_array_store_exception:
1248    POP  edx
1249    POP  ecx
1250    POP  eax
1251    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context
1252    // Outgoing argument set up
1253    PUSH eax                      // alignment padding
1254    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1255    CFI_ADJUST_CFA_OFFSET(4)
1256    PUSH edx                      // pass arg2 - value
1257    PUSH eax                      // pass arg1 - array
1258    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
1259    UNREACHABLE
1260END_FUNCTION art_quick_aput_obj
1261
1262DEFINE_FUNCTION art_quick_memcpy
1263    SETUP_GOT_NOSAVE ebx          // clobbers EBX
1264    PUSH edx                      // pass arg3
1265    PUSH ecx                      // pass arg2
1266    PUSH eax                      // pass arg1
1267    call PLT_SYMBOL(memcpy)       // (void*, const void*, size_t)
1268    addl LITERAL(12), %esp        // pop arguments
1269    CFI_ADJUST_CFA_OFFSET(-12)
1270    ret
1271END_FUNCTION art_quick_memcpy
1272
1273NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret
1274
1275DEFINE_FUNCTION art_quick_d2l
1276    subl LITERAL(12), %esp        // alignment padding, room for argument
1277    CFI_ADJUST_CFA_OFFSET(12)
1278    movsd %xmm0, 0(%esp)          // arg a
1279    call SYMBOL(art_d2l)          // (jdouble a)
1280    addl LITERAL(12), %esp        // pop arguments
1281    CFI_ADJUST_CFA_OFFSET(-12)
1282    ret
1283END_FUNCTION art_quick_d2l
1284
1285DEFINE_FUNCTION art_quick_f2l
1286    subl LITERAL(12), %esp        // alignment padding
1287    CFI_ADJUST_CFA_OFFSET(12)
1288    movss %xmm0, 0(%esp)          // arg a
1289    call SYMBOL(art_f2l)          // (jfloat a)
1290    addl LITERAL(12), %esp        // pop arguments
1291    CFI_ADJUST_CFA_OFFSET(-12)
1292    ret
1293END_FUNCTION art_quick_f2l
1294
1295DEFINE_FUNCTION art_quick_ldiv
1296    subl LITERAL(12), %esp        // alignment padding
1297    CFI_ADJUST_CFA_OFFSET(12)
1298    PUSH ebx                      // pass arg4 b.hi
1299    PUSH edx                      // pass arg3 b.lo
1300    PUSH ecx                      // pass arg2 a.hi
1301    PUSH eax                      // pass arg1 a.lo
1302    call SYMBOL(artLdiv)          // (jlong a, jlong b)
1303    addl LITERAL(28), %esp        // pop arguments
1304    CFI_ADJUST_CFA_OFFSET(-28)
1305    ret
1306END_FUNCTION art_quick_ldiv
1307
1308DEFINE_FUNCTION art_quick_lmod
1309    subl LITERAL(12), %esp        // alignment padding
1310    CFI_ADJUST_CFA_OFFSET(12)
1311    PUSH ebx                      // pass arg4 b.hi
1312    PUSH edx                      // pass arg3 b.lo
1313    PUSH ecx                      // pass arg2 a.hi
1314    PUSH eax                      // pass arg1 a.lo
1315    call SYMBOL(artLmod)          // (jlong a, jlong b)
1316    addl LITERAL(28), %esp        // pop arguments
1317    CFI_ADJUST_CFA_OFFSET(-28)
1318    ret
1319END_FUNCTION art_quick_lmod
1320
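// The 64-bit multiply below relies on the identity (everything mod 2^64), noted here only
// as a reminder of why the a.hi*b.hi term can be dropped:
//   (a.hi*2^32 + a.lo) * (b.hi*2^32 + b.lo)
//     = a.lo*b.lo + 2^32*(a.lo*b.hi + a.hi*b.lo) + 2^64*(a.hi*b.hi)
//     = a.lo*b.lo + 2^32*(a.lo*b.hi + a.hi*b.lo)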
1321DEFINE_FUNCTION art_quick_lmul
1322    imul %eax, %ebx               // ebx = a.lo(eax) * b.hi(ebx)
1323    imul %edx, %ecx               // ecx = b.lo(edx) * a.hi(ecx)
1324    mul  %edx                     // edx:eax = a.lo(eax) * b.lo(edx)
1325    add  %ebx, %ecx
1326    add  %ecx, %edx               // edx += (a.lo * b.hi) + (b.lo * a.hi)
1327    ret
1328END_FUNCTION art_quick_lmul
1329
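// Note for the three long-shift stubs below: x86 masks 32-bit shift counts to 5 bits, so
// shld/shl (and shrd/sar, shrd/shr) alone cannot handle counts of 32..63; the
// "test LITERAL(32), %cl" sequence fixes up that case. For example, an lshl by 40 ends
// with edx = lo << 8 and eax = 0. lshr and lushr differ only in using sar (sign fill)
// versus shr (zero fill) and in their fix-up.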
1330DEFINE_FUNCTION art_quick_lshl
1331    // ecx:eax << edx
1332    xchg %edx, %ecx
1333    shld %cl,%eax,%edx
1334    shl  %cl,%eax
1335    test LITERAL(32), %cl
1336    jz  1f
1337    mov %eax, %edx
1338    xor %eax, %eax
13391:
1340    ret
1341END_FUNCTION art_quick_lshl
1342
1343DEFINE_FUNCTION art_quick_lshr
1344    // ecx:eax >> edx
1345    xchg %edx, %ecx
1346    shrd %cl,%edx,%eax
1347    sar  %cl,%edx
1348    test LITERAL(32),%cl
1349    jz  1f
1350    mov %edx, %eax
1351    sar LITERAL(31), %edx
13521:
1353    ret
1354END_FUNCTION art_quick_lshr
1355
1356DEFINE_FUNCTION art_quick_lushr
1357    // ecx:eax >>> edx
1358    xchg %edx, %ecx
1359    shrd %cl,%edx,%eax
1360    shr  %cl,%edx
1361    test LITERAL(32),%cl
1362    jz  1f
1363    mov %edx, %eax
1364    xor %edx, %edx
13651:
1366    ret
1367END_FUNCTION art_quick_lushr
1368
1369ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1370ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1371ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1372ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1373ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1374ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1375ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1376
1377TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1378TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1379TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1380TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1381TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1382TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1383TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
1384
1385TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO
1386TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO
1387TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO
1388TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO
1389
1390THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO
1391THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO
1392THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO
1393THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO
1394
1395// Call artSet64InstanceFromCode with 4 word-size arguments and the referrer.
1396DEFINE_FUNCTION art_quick_set64_instance
1397    movd %ebx, %xmm0
1398    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx  // save ref containing registers for GC
1399    movd %xmm0, %ebx
1400    // Outgoing argument set up
1401    subl LITERAL(8), %esp         // alignment padding
1402    CFI_ADJUST_CFA_OFFSET(8)
1403    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1404    CFI_ADJUST_CFA_OFFSET(4)
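    // The referrer (the caller's ArtMethod*) sits at FRAME_SIZE_REFS_ONLY_CALLEE_SAVE
    // above the frame set up earlier; the +12 below accounts for the 12 bytes pushed
    // since then (8 bytes of padding plus Thread*).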
1405    pushl (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+12)(%esp)  // pass referrer
1406    CFI_ADJUST_CFA_OFFSET(4)
1407    PUSH ebx                      // pass high half of new_val
1408    PUSH edx                      // pass low half of new_val
1409    PUSH ecx                      // pass object
1410    PUSH eax                      // pass field_idx
1411    call SYMBOL(artSet64InstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*)
1412    addl LITERAL(32), %esp        // pop arguments
1413    CFI_ADJUST_CFA_OFFSET(-32)
1414    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
1415    RETURN_IF_EAX_ZERO            // return or deliver exception
1416END_FUNCTION art_quick_set64_instance
1417
1418// Call artSet64StaticFromCode with 3 word-size arguments plus the referrer in the 2nd position,
1419// so that new_val would be aligned on even registers if we were passing arguments in registers.
1420DEFINE_FUNCTION art_quick_set64_static
1421    // TODO: Implement SETUP_GOT_NOSAVE for got_reg = ecx to avoid moving around the registers.
1422    movd %ebx, %xmm0
1423    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  ebx, ebx  // save ref containing registers for GC
1424    movd %xmm0, %ebx
1425    mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ecx  // get referrer
1426    subl LITERAL(12), %esp        // alignment padding
1427    CFI_ADJUST_CFA_OFFSET(12)
1428    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1429    CFI_ADJUST_CFA_OFFSET(4)
1430    PUSH ebx                      // pass high half of new_val
1431    PUSH edx                      // pass low half of new_val
1432    PUSH ecx                      // pass referrer
1433    PUSH eax                      // pass field_idx
1434    call SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*)
1435    addl LITERAL(32), %esp        // pop arguments
1436    CFI_ADJUST_CFA_OFFSET(-32)
1437    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
1438    RETURN_IF_EAX_ZERO            // return or deliver exception
1439END_FUNCTION art_quick_set64_static
1440
1441DEFINE_FUNCTION art_quick_proxy_invoke_handler
1442    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX
1443    PUSH esp                      // pass SP
1444    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1445    CFI_ADJUST_CFA_OFFSET(4)
1446    PUSH ecx                      // pass receiver
1447    PUSH eax                      // pass proxy method
1448    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
1449    movd %eax, %xmm0              // place return value also into floating point return value
1450    movd %edx, %xmm1
1451    punpckldq %xmm1, %xmm0
1452    addl LITERAL(16 + FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - FRAME_SIZE_REFS_ONLY_CALLEE_SAVE), %esp
1453    CFI_ADJUST_CFA_OFFSET(-(16 + FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - FRAME_SIZE_REFS_ONLY_CALLEE_SAVE))
1454    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1455    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1456END_FUNCTION art_quick_proxy_invoke_handler
1457
1458    /*
1459     * Called to resolve an IMT conflict.
1460     * eax is the conflict ArtMethod.
1461     * xmm7 is a hidden argument that holds the target interface method's dex method index.
1462     *
1463     * Note that this stub writes to eax.
1464     * Because of a lack of free registers, it also saves and restores edi.
1465     */
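    // Informal sketch of the table walk below (illustrative only; entry is informal):
    //   interface_method = referrer->dex_cache_methods[method_index_from_xmm7];
    //   for (entry = conflict_method->jni_entrypoint; entry[0] != null; entry += 2)
    //     if (entry[0] == interface_method) jump entry[1]->quick_code;
    //   artInvokeInterfaceTrampoline(...)  // slow path; populates the table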
1466DEFINE_FUNCTION art_quick_imt_conflict_trampoline
1467    PUSH edi
1468    movl 8(%esp), %edi // Load referrer
1469    movl ART_METHOD_DEX_CACHE_METHODS_OFFSET_32(%edi), %edi   // Load dex cache methods array
1470    pushl ART_METHOD_JNI_OFFSET_32(%eax)  // Push ImtConflictTable.
1471    CFI_ADJUST_CFA_OFFSET(4)
1472    movd %xmm7, %eax              // get target method index stored in xmm7
1473    movl 0(%edi, %eax, __SIZEOF_POINTER__), %edi  // Load interface method
1474    popl %eax  // Pop ImtConflictTable.
1475    CFI_ADJUST_CFA_OFFSET(-4)
1476.Limt_table_iterate:
1477    cmpl %edi, 0(%eax)
1478    jne .Limt_table_next_entry
1479    // We successfully hit an entry in the table. Load the target method
1480    // and jump to it.
1481    POP edi
1482    movl __SIZEOF_POINTER__(%eax), %eax
1483    jmp *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
1484.Limt_table_next_entry:
1485    // If the entry is null, the interface method is not in the ImtConflictTable.
1486    cmpl LITERAL(0), 0(%eax)
1487    jz .Lconflict_trampoline
1488    // Iterate over the entries of the ImtConflictTable.
1489    addl LITERAL(2 * __SIZEOF_POINTER__), %eax
1490    jmp .Limt_table_iterate
1491.Lconflict_trampoline:
1492    // Call the runtime stub to populate the ImtConflictTable and jump to the
1493    // resolved method.
1494    POP edi
1495    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
1496END_FUNCTION art_quick_imt_conflict_trampoline
1497
1498DEFINE_FUNCTION art_quick_resolution_trampoline
1499    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx
1500    movl %esp, %edi
1501    PUSH edi                      // pass SP; do not just PUSH esp, as that messes up unwinding
1502    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1503    CFI_ADJUST_CFA_OFFSET(4)
1504    PUSH ecx                      // pass receiver
1505    PUSH eax                      // pass method
1506    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
1507    movl %eax, %edi               // remember code pointer in EDI
1508    addl LITERAL(16), %esp        // pop arguments
1509    CFI_ADJUST_CFA_OFFSET(-16)
1510    test %eax, %eax               // if code pointer is null goto deliver pending exception
1511    jz 1f
1512    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP
15131:
1514    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
1515    DELIVER_PENDING_EXCEPTION
1516END_FUNCTION art_quick_resolution_trampoline
1517
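    /*
     * Generic JNI entry point. Rough outline of the code below: set up the refs-and-args
     * frame, reserve scratch space, let artQuickGenericJniTrampoline build the native
     * call frame and hand back the native code pointer, call the native code, then let
     * artQuickGenericJniEndTrampoline convert the raw result before returning to managed
     * code (or delivering a pending exception).
     */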
1518DEFINE_FUNCTION art_quick_generic_jni_trampoline
1519    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX
1520    movl %esp, %ebp               // save SP at callee-save frame
1521    CFI_DEF_CFA_REGISTER(ebp)
1522    subl LITERAL(5120), %esp
1523    // prepare for artQuickGenericJniTrampoline call
1524    // (Thread*,  SP)
1525    //  (esp)    4(esp)   <= C calling convention
1526    //  fs:...    ebp     <= where they are
1527
1528    subl LITERAL(8), %esp         // Padding for 16B alignment.
1529    pushl %ebp                    // Pass SP (to ArtMethod).
1530    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1531    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
1532
1533    // The C call will have registered the complete save-frame on success.
1534    // The result of the call is:
1535    // eax: pointer to native code, 0 on error.
1536    // edx: pointer to the bottom of the used area of the alloca; the stack can be restored up to there.
1537
1538    // Check for error = 0.
1539    test %eax, %eax
1540    jz .Lexception_in_native
1541
1542    // Release part of the alloca.
1543    movl %edx, %esp
1544
1545    // On x86 no arguments are passed in registers, so there is nothing to pop here.
1546    // Native call.
1547    call *%eax
1548
1549    // result sign extension is handled in C code
1550    // prepare for artQuickGenericJniEndTrampoline call
1551    // (Thread*, result, result_f)
1552    //  (esp)    4(esp)  12(esp)    <= C calling convention
1553    //  fs:...  eax:edx   fp0      <= where they are
1554
1555    subl LITERAL(20), %esp        // Padding & pass float result.
1556    fstpl (%esp)
1557    pushl %edx                    // Pass int result.
1558    pushl %eax
1559    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1560    call SYMBOL(artQuickGenericJniEndTrampoline)
1561
1562    // Pending exceptions possible.
1563    mov %fs:THREAD_EXCEPTION_OFFSET, %ebx
1564    testl %ebx, %ebx
1565    jnz .Lexception_in_native
1566
1567    // Tear down the alloca.
1568    movl %ebp, %esp
1569    CFI_DEF_CFA_REGISTER(esp)
1570
1571
1572    // Tear down the callee-save frame.
1573    // Remove space for FPR args and EAX
1574    addl LITERAL(4 + 4 * 8), %esp
1575    CFI_ADJUST_CFA_OFFSET(-(4 + 4 * 8))
1576
1577    POP ecx
1578    addl LITERAL(4), %esp         // Avoid edx, as it may be part of the result.
1579    CFI_ADJUST_CFA_OFFSET(-4)
1580    POP ebx
1581    POP ebp  // Restore callee saves
1582    POP esi
1583    POP edi
1584    // Quick expects the return value to be in xmm0.
1585    movd %eax, %xmm0
1586    movd %edx, %xmm1
1587    punpckldq %xmm1, %xmm0
1588    ret
1589.Lexception_in_native:
1590    movl %fs:THREAD_TOP_QUICK_FRAME_OFFSET, %esp
1591    // Do a call to push a return pc for the save-all frame that DELIVER_PENDING_EXCEPTION sets up.
1592    call .Lexception_call
1593.Lexception_call:
1594    DELIVER_PENDING_EXCEPTION
1595END_FUNCTION art_quick_generic_jni_trampoline
1596
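    /*
     * Bridge from compiled code into the interpreter. Rough outline of the code below:
     * pass the ArtMethod* and the caller's SP to artQuickToInterpreterBridge so the
     * interpreter can read the stacked arguments, then return the result in eax:edx,
     * mirrored into xmm0 for callers expecting a floating-point result.
     */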
1597DEFINE_FUNCTION art_quick_to_interpreter_bridge
1598    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  ebx, ebx  // save frame
1599    mov %esp, %edx                // remember SP
1600    PUSH eax                      // alignment padding
1601    PUSH edx                      // pass SP
1602    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1603    CFI_ADJUST_CFA_OFFSET(4)
1604    PUSH eax                      // pass method
1605    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
1606    addl LITERAL(16), %esp        // pop arguments
1607    CFI_ADJUST_CFA_OFFSET(-16)
1608
1609    // Return eax:edx in xmm0 also.
1610    movd %eax, %xmm0
1611    movd %edx, %xmm1
1612    punpckldq %xmm1, %xmm0
1613
1614    addl LITERAL(48), %esp        // Remove FPRs and EAX, ECX, EDX, EBX.
1615    CFI_ADJUST_CFA_OFFSET(-48)
1616
1617    POP ebp                       // Restore callee saves
1618    POP esi
1619    POP edi
1620
1621    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1622END_FUNCTION art_quick_to_interpreter_bridge
1623
1624    /*
1625     * Routine that intercepts method calls and returns.
1626     */
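    // Summary of the mechanism below: the entry stub calls
    // artInstrumentationMethodEntryFromCode, rets into the code pointer it returns, and
    // overwrites the stacked return pc with art_quick_instrumentation_exit so the exit
    // stub runs when the method returns; the exit stub reports the result and jumps to
    // the pc returned by artInstrumentationMethodExitFromCode.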
1627DEFINE_FUNCTION art_quick_instrumentation_entry
1628    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, edx
1629    PUSH eax                      // Save eax which will be clobbered by the callee-save method.
1630    subl LITERAL(12), %esp        // Align stack.
1631    CFI_ADJUST_CFA_OFFSET(12)
1632    pushl FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-4+16(%esp)  // Pass LR.
1633    CFI_ADJUST_CFA_OFFSET(4)
1634    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1635    CFI_ADJUST_CFA_OFFSET(4)
1636    PUSH ecx                      // Pass receiver.
1637    PUSH eax                      // Pass Method*.
1638    call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR)
1639    addl LITERAL(28), %esp        // Pop arguments up to the saved Method*.
1640    CFI_ADJUST_CFA_OFFSET(-28)
1641    movl 60(%esp), %edi           // Restore edi.
1642    movl %eax, 60(%esp)           // Place code* over edi, just under return pc.
1643    movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
1644    // Place instrumentation exit as return pc. ebx holds the GOT computed on entry.
1645    movl %ebx, 64(%esp)
1646    movl 0(%esp), %eax           // Restore eax.
1647    // Restore FPRs (extra 4 bytes of offset due to EAX push at top).
1648    movsd 8(%esp), %xmm0
1649    movsd 16(%esp), %xmm1
1650    movsd 24(%esp), %xmm2
1651    movsd 32(%esp), %xmm3
1652
1653    // Restore GPRs.
1654    movl 40(%esp), %ecx           // Restore ecx.
1655    movl 44(%esp), %edx           // Restore edx.
1656    movl 48(%esp), %ebx           // Restore ebx.
1657    movl 52(%esp), %ebp           // Restore ebp.
1658    movl 56(%esp), %esi           // Restore esi.
1659    addl LITERAL(60), %esp        // Wind stack back up to code*.
1660    CFI_ADJUST_CFA_OFFSET(-60)
1661    ret                           // Call method (and pop).
1662END_FUNCTION art_quick_instrumentation_entry
1663
1664DEFINE_FUNCTION art_quick_instrumentation_exit
1665    pushl LITERAL(0)              // Push a fake return PC as there will be none on the stack.
1666    CFI_ADJUST_CFA_OFFSET(4)
1667    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx
1668    mov  %esp, %ecx               // Remember SP
1669    subl LITERAL(8), %esp         // Save float return value.
1670    CFI_ADJUST_CFA_OFFSET(8)
1671    movq %xmm0, (%esp)
1672    PUSH edx                      // Save gpr return value.
1673    PUSH eax
1674    subl LITERAL(16), %esp        // Align stack
1675    CFI_ADJUST_CFA_OFFSET(16)
1676    movq %xmm0, (%esp)            // Pass float return value.
1677    PUSH edx                      // Pass gpr return value.
1678    PUSH eax
1679    PUSH ecx                      // Pass SP.
1680    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1681    CFI_ADJUST_CFA_OFFSET(4)
1682    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result, fpr_result)
1683    mov   %eax, %ecx              // Move returned link register.
1684    addl LITERAL(32), %esp        // Pop arguments.
1685    CFI_ADJUST_CFA_OFFSET(-32)
1686    movl %edx, %ebx               // Move returned link register for deopt
1687                                  // (ebx is pretending to be our LR).
1688    POP eax                       // Restore gpr return value.
1689    POP edx
1690    movq (%esp), %xmm0            // Restore fpr return value.
1691    addl LITERAL(8), %esp
1692    CFI_ADJUST_CFA_OFFSET(-8)
1693    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1694    addl LITERAL(4), %esp         // Remove fake return pc.
1695    CFI_ADJUST_CFA_OFFSET(-4)
1696    jmp   *%ecx                   // Return.
1697END_FUNCTION art_quick_instrumentation_exit
1698
1699    /*
1700     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1701     * will long jump to the upcall with a special exception of -1.
1702     */
1703DEFINE_FUNCTION art_quick_deoptimize
1704    PUSH ebx                      // Entry point for a jump. Fake that we were called.
1705    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
1706    subl LITERAL(12), %esp        // Align stack.
1707    CFI_ADJUST_CFA_OFFSET(12)
1708    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1709    CFI_ADJUST_CFA_OFFSET(4)
1710    call SYMBOL(artDeoptimize)    // artDeoptimize(Thread*)
1711    UNREACHABLE
1712END_FUNCTION art_quick_deoptimize
1713
1714    /*
1715     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
1716     * will long jump to the interpreter bridge.
1717     */
1718DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
1719    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
1720    subl LITERAL(12), %esp                      // Align stack.
1721    CFI_ADJUST_CFA_OFFSET(12)
1722    pushl %fs:THREAD_SELF_OFFSET                // Pass Thread::Current().
1723    CFI_ADJUST_CFA_OFFSET(4)
1724    call SYMBOL(artDeoptimizeFromCompiledCode)  // artDeoptimizeFromCompiledCode(Thread*)
1725    UNREACHABLE
1726END_FUNCTION art_quick_deoptimize_from_compiled_code
1727
1728    /*
1729     * String's compareTo.
1730     *
1731     * On entry:
1732     *    eax:   this string object (known non-null)
1733     *    ecx:   comp string object (known non-null)
1734     */
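    // Informal sketch of the comparison below (illustrative only):
    //   len = min(this.count, comp.count);
    //   for (i = 0; i < len; ++i)
    //     if (this.value[i] != comp.value[i]) return this.value[i] - comp.value[i];
    //   return this.count - comp.count;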
1735DEFINE_FUNCTION art_quick_string_compareto
1736    PUSH esi                      // push callee save reg
1737    PUSH edi                      // push callee save reg
1738    mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx
1739    mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx
1740    lea MIRROR_STRING_VALUE_OFFSET(%eax), %esi
1741    lea MIRROR_STRING_VALUE_OFFSET(%ecx), %edi
1742    /* Calculate min length and count diff */
1743    mov   %edx, %ecx
1744    mov   %edx, %eax
1745    subl  %ebx, %eax
1746    cmovg %ebx, %ecx
1747    /*
1748     * At this point we have:
1749     *   eax: value to return if first part of strings are equal
1750     *   ecx: minimum among the lengths of the two strings
1751     *   esi: pointer to this string data
1752     *   edi: pointer to comp string data
1753     */
1754    jecxz .Lkeep_length
1755    repe cmpsw                    // find nonmatching chars in [%esi] and [%edi], up to length %ecx
1756    jne .Lnot_equal
1757.Lkeep_length:
1758    POP edi                       // pop callee save reg
1759    POP esi                       // pop callee save reg
1760    ret
1761    .balign 16
1762.Lnot_equal:
1763    movzwl  -2(%esi), %eax        // get last compared char from this string
1764    movzwl  -2(%edi), %ecx        // get last compared char from comp string
1765    subl  %ecx, %eax              // return the difference
1766    POP edi                       // pop callee save reg
1767    POP esi                       // pop callee save reg
1768    ret
1769END_FUNCTION art_quick_string_compareto
1770
1771// Return from a nested signal:
1772// Entry:
1773//  eax: address of jmp_buf in TLS
1774
1775DEFINE_FUNCTION art_nested_signal_return
1776    SETUP_GOT_NOSAVE ebx            // sets %ebx for call into PLT
1777    movl LITERAL(1), %ecx
1778    PUSH ecx                        // second arg to longjmp (1)
1779    PUSH eax                        // first arg to longjmp (jmp_buf)
1780    call PLT_SYMBOL(longjmp)
1781    UNREACHABLE
1782END_FUNCTION art_nested_signal_return
1783
1784DEFINE_FUNCTION art_quick_read_barrier_mark
1785    PUSH eax                         // pass arg1 - obj
1786    call SYMBOL(artReadBarrierMark)  // artReadBarrierMark(obj)
1787    addl LITERAL(4), %esp            // pop argument
1788    CFI_ADJUST_CFA_OFFSET(-4)
1789    ret
1790END_FUNCTION art_quick_read_barrier_mark
1791
1792DEFINE_FUNCTION art_quick_read_barrier_slow
1793    PUSH edx                         // pass arg3 - offset
1794    PUSH ecx                         // pass arg2 - obj
1795    PUSH eax                         // pass arg1 - ref
1796    call SYMBOL(artReadBarrierSlow)  // artReadBarrierSlow(ref, obj, offset)
1797    addl LITERAL(12), %esp           // pop arguments
1798    CFI_ADJUST_CFA_OFFSET(-12)
1799    ret
1800END_FUNCTION art_quick_read_barrier_slow
1801
1802DEFINE_FUNCTION art_quick_read_barrier_for_root_slow
1803    PUSH eax                                // pass arg1 - root
1804    call SYMBOL(artReadBarrierForRootSlow)  // artReadBarrierForRootSlow(root)
1805    addl LITERAL(4), %esp                   // pop argument
1806    CFI_ADJUST_CFA_OFFSET(-4)
1807    ret
1808END_FUNCTION art_quick_read_barrier_for_root_slow
1809
1810    /*
1811     * On stack replacement stub.
1812     * On entry:
1813     *   [sp] = return address
1814     *   [sp + 4] = stack to copy
1815     *   [sp + 8] = size of stack
1816     *   [sp + 12] = pc to call
1817     *   [sp + 16] = JValue* result
1818     *   [sp + 20] = shorty
1819     *   [sp + 24] = thread
1820     */
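    // Result handling note (summary of the code below): eax:edx are always stored into
    // the JValue; when the shorty's return char is 'F' or 'D', the slot is then
    // overwritten from xmm0 so the floating-point bits win.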
1821DEFINE_FUNCTION art_quick_osr_stub
1822    // Save native callee saves.
1823    PUSH ebp
1824    PUSH ebx
1825    PUSH esi
1826    PUSH edi
1827    mov 4+16(%esp), %esi           // ESI = argument array
1828    mov 8+16(%esp), %ecx           // ECX = size of args
1829    mov 12+16(%esp), %ebx          // EBX = pc to call
1830    mov %esp, %ebp                 // Save stack pointer
1831    andl LITERAL(0xFFFFFFF0), %esp // Align stack
1832    PUSH ebp                       // Save old stack pointer
1833    subl LITERAL(12), %esp         // Align stack
1834    movl LITERAL(0), (%esp)        // Store null for ArtMethod* slot
1835    call .Losr_entry
1836
1837    // Restore stack pointer.
1838    addl LITERAL(12), %esp
1839    POP ebp
1840    mov %ebp, %esp
1841
1842    // Restore callee saves.
1843    POP edi
1844    POP esi
1845    POP ebx
1846    POP ebp
1847    mov 16(%esp), %ecx            // Get JValue result
1848    mov %eax, (%ecx)              // Store the result assuming it is a long, int or Object*
1849    mov %edx, 4(%ecx)             // Store the other half of the result
1850    mov 20(%esp), %edx            // Get the shorty
1851    cmpb LITERAL(68), (%edx)      // Test if result type char == 'D'
1852    je .Losr_return_double_quick
1853    cmpb LITERAL(70), (%edx)      // Test if result type char == 'F'
1854    je .Losr_return_float_quick
1855    ret
1856.Losr_return_double_quick:
1857    movsd %xmm0, (%ecx)           // Store the floating point result
1858    ret
1859.Losr_return_float_quick:
1860    movss %xmm0, (%ecx)           // Store the floating point result
1861    ret
1862.Losr_entry:
1863    subl LITERAL(4), %ecx         // The given stack size includes the pushed frame pointer; subtract it.
1864    subl %ecx, %esp
1865    mov %esp, %edi                // EDI = beginning of stack
1866    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
1867    jmp *%ebx
1868END_FUNCTION art_quick_osr_stub
1869
1870    // TODO: implement these!
1871UNIMPLEMENTED art_quick_memcmp16
1872