/* quick_entrypoints_arm.S, revision 76a0723959c2185ace249e705774af9a679a0e0d */
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm.S"

#include "arch/quick_alloc_entrypoints.S"

    /* Deliver the given exception */
    .extern artDeliverExceptionFromCode
    /* Deliver an exception pending on a thread */
    .extern artDeliverPendingException

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    push {r4-r11, lr} @ 9 words of callee saves
    .save {r4-r11, lr}
    .cfi_adjust_cfa_offset 36
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r5, 4
    .cfi_rel_offset r6, 8
    .cfi_rel_offset r7, 12
    .cfi_rel_offset r8, 16
    .cfi_rel_offset r9, 20
    .cfi_rel_offset r10, 24
    .cfi_rel_offset r11, 28
    .cfi_rel_offset lr, 32
    vpush {s0-s31}
    .pad #128
    .cfi_adjust_cfa_offset 128
    sub sp, #12       @ 3 words of space, bottom word will hold Method*
    .pad #12
    .cfi_adjust_cfa_offset 12

     // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 36 + 128 + 12)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM) size not as expected."
#endif
.endm
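
    /*
     * Illustrative layout sketch (not assembled) of the frame built by the macro above, as
     * implied by the pushes and by the FRAME_SIZE_SAVE_ALL_CALLEE_SAVE check (36 + 128 + 12 = 176
     * bytes). Offsets are from SP after the macro completes:
     *   [sp, #0]           Method* slot (bottom word reserved by the macro)
     *   [sp, #4..#11]      2 words of padding
     *   [sp, #12..#139]    s0-s31
     *   [sp, #140..#175]   r4-r11, lr
     */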

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    push {r5-r8, r10-r11, lr} @ 7 words of callee saves
    .save {r5-r8, r10-r11, lr}
    .cfi_adjust_cfa_offset 28
    .cfi_rel_offset r5, 0
    .cfi_rel_offset r6, 4
    .cfi_rel_offset r7, 8
    .cfi_rel_offset r8, 12
    .cfi_rel_offset r10, 16
    .cfi_rel_offset r11, 20
    .cfi_rel_offset lr, 24
    sub sp, #4                @ bottom word will hold Method*
    .pad #4
    .cfi_adjust_cfa_offset 4

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 28 + 4)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM) size not as expected."
#endif
.endm
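
    /*
     * Illustrative layout sketch (not assembled) of the kRefsOnly frame built above
     * (28 + 4 = 32 bytes). Offsets are from SP after the macro completes:
     *   [sp, #0]          Method* slot (bottom word reserved by the macro)
     *   [sp, #4..#31]     r5-r8, r10-r11, lr
     * Note: the field accessor stubs below read the referrer with "ldr rX, [sp, #32]", i.e. the
     * first word above this 32-byte frame, which should be the caller's Method* at the bottom of
     * the caller's frame.
     */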

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    add sp, #4               @ bottom word holds Method*
    pop {r5-r8, r10-r11, lr} @ 7 words of callee saves
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_adjust_cfa_offset -32
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    add sp, #4               @ bottom word holds Method*
    pop {r5-r8, r10-r11, lr} @ 7 words of callee saves
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_adjust_cfa_offset -32
    bx  lr                   @ return
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    push {r1-r3, r5-r8, r10-r11, lr}  @ 10 words of callee saves
    .save {r1-r3, r5-r8, r10-r11, lr}
    .cfi_rel_offset r1, 0
    .cfi_rel_offset r2, 4
    .cfi_rel_offset r3, 8
    .cfi_rel_offset r5, 12
    .cfi_rel_offset r6, 16
    .cfi_rel_offset r7, 20
    .cfi_rel_offset r8, 24
    .cfi_rel_offset r10, 28
    .cfi_rel_offset r11, 32
    .cfi_rel_offset lr, 36
    .cfi_adjust_cfa_offset 40
    sub sp, #8                        @ 2 words of space, bottom word will hold Method*
    .pad #8
    .cfi_adjust_cfa_offset 8

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 40 + 8)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM) size not as expected."
#endif
.endm
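
    /*
     * Illustrative layout sketch (not assembled) of the kRefsAndArgs frame built above
     * (40 + 8 = 48 bytes). Offsets are from SP after the macro completes:
     *   [sp, #0]          Method* slot (bottom word reserved by the macro)
     *   [sp, #4]          spare word
     *   [sp, #8..#47]     r1-r3, r5-r8, r10-r11, lr
     * The invoke trampolines below read the caller's Method* with "ldr r2, [sp, #48]", the word
     * just above this frame.
     */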

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    add  sp, #8                      @ rewind sp
    pop {r1-r3, r5-r8, r10-r11, lr}  @ 10 words of callee saves
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore r3
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_adjust_cfa_offset -48
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz   r0, 1f              @ result non-zero branch over
    bx     lr                  @ return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz    r0, 1f              @ result zero branch over
    bx     lr                  @ return
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    .fnend
    .fnstart
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME           @ save callee saves for throw
    mov    r0, r9                              @ pass Thread::Current
    mov    r1, sp                              @ pass SP
    b      artDeliverPendingExceptionFromCode  @ artDeliverPendingExceptionFromCode(Thread*, SP)
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r0, r9                      @ pass Thread::Current
    mov r1, sp                      @ pass SP
    b   \cxx_name                   @ \cxx_name(Thread*, SP)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r1, r9                      @ pass Thread::Current
    mov r2, sp                      @ pass SP
    b   \cxx_name                   @ \cxx_name(Thread*, SP)
    bkpt
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r2, r9                      @ pass Thread::Current
    mov r3, sp                      @ pass SP
    b   \cxx_name                   @ \cxx_name(Thread*, SP)
    bkpt
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

  /*
   * Invoke stack overflow exception from signal handler.
   * On entry:
   * r9: thread
   * sp: address of last known frame
   * r12: address of next valid SP below protected region in stack
   *
   * This is deceptively simple but hides some complexity.  It is called in the case of
   * a stack overflow condition during implicit checks.  The signal handler has been
   * called by the kernel due to a load from the protected stack region.  The handler
   * works out the address of the previous frame and passes this in SP.  However there
   * is a piece of memory somewhere below the current SP that is not accessible (the
   * memory that caused the signal).  The signal handler works out the next
   * accessible value of SP and passes this in r12.  This code then sets up the SP
   * to be this new value and calls the code to create and throw the stack overflow
   * exception.
   */
ENTRY art_quick_throw_stack_overflow_from_signal
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r0, r9                      @ pass Thread::Current
    mov r1, sp                      @ pass SP
    mov sp, r12                     @ move SP down to below protected region.
    b   artThrowStackOverflowFromCode                   @ artThrowStackOverflowFromCode(Thread*, SP)
END art_quick_throw_stack_overflow_from_signal

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
     * the method_idx.  This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/r1.
     *
     * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
     * of the target Method* in r0 and method->code_ in r1.
     *
     * If unsuccessful, the helper will return NULL/NULL. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  @ save callee saves in case allocation triggers GC
    ldr    r2, [sp, #48]                  @ pass caller Method*
    mov    r3, r9                         @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!               @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     \cxx_name                      @ (method_idx, this, caller, Thread*, SP)
    add    sp, #16                        @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    mov    r12, r1                        @ save Method*->code_
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz    r0, 1f                         @ did we find the target? if not go to exception delivery
    bx     r12                            @ tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm
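
    /*
     * Sketch of the C helper shape assumed by INVOKE_TRAMPOLINE above (illustrative only; the
     * parameter types are simplified and the authoritative prototypes live in the runtime's C++
     * sources):
     *
     *   extern "C" uint64_t artInvokeInterfaceTrampolineWithAccessCheck(
     *       uint32_t method_idx, void* this_object, void* caller_method, void* self, void* sp);
     *
     * The low word of the 64-bit result (r0) is the resolved Method* (or null on failure) and the
     * high word (r1) is its code_ entry point, matching the r0/r1 handling in the macro.
     */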

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck

    /*
     * Quick invocation stub.
     * On entry:
     *   r0 = method pointer
     *   r1 = argument array or NULL for no argument methods
     *   r2 = size of argument array in bytes
     *   r3 = (managed) thread pointer
     *   [sp] = JValue* result
     *   [sp + 4] = shorty
     */
ENTRY art_quick_invoke_stub
    push   {r0, r4, r5, r9, r11, lr}       @ spill regs
    .save  {r0, r4, r5, r9, r11, lr}
    .pad #24
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r4, 4
    .cfi_rel_offset r5, 8
    .cfi_rel_offset r9, 12
    .cfi_rel_offset r11, 16
    .cfi_rel_offset lr, 20
    mov    r11, sp                         @ save the stack pointer
    .cfi_def_cfa_register r11
    mov    r9, r3                          @ move managed thread pointer into r9
#ifdef ARM_R4_SUSPEND_FLAG
    mov    r4, #SUSPEND_CHECK_INTERVAL     @ reset r4 to suspend check interval
#endif
    add    r5, r2, #16                     @ create space for method pointer in frame
    and    r5, #0xFFFFFFF0                 @ align frame size to 16 bytes
    sub    sp, r5                          @ reserve stack space for argument array
    add    r0, sp, #4                      @ pass stack pointer + method ptr as dest for memcpy
    bl     memcpy                          @ memcpy (dest, src, bytes)
    ldr    r0, [r11]                       @ restore method*
    ldr    r1, [sp, #4]                    @ copy arg value for r1
    ldr    r2, [sp, #8]                    @ copy arg value for r2
    ldr    r3, [sp, #12]                   @ copy arg value for r3
    mov    ip, #0                          @ set ip to 0
    str    ip, [sp]                        @ store NULL for method* at bottom of frame
    ldr    ip, [r0, #METHOD_QUICK_CODE_OFFSET]  @ get pointer to the code
    blx    ip                              @ call the method
    mov    sp, r11                         @ restore the stack pointer
    ldr    ip, [sp, #24]                   @ load the result pointer
    strd   r0, [ip]                        @ store r0/r1 into result pointer
    pop    {r0, r4, r5, r9, r11, lr}       @ restore spill regs
    .cfi_restore r0
    .cfi_restore r4
    .cfi_restore r5
    .cfi_restore r9
    .cfi_restore r11
    .cfi_restore lr
    .cfi_adjust_cfa_offset -24
    bx     lr
END art_quick_invoke_stub
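
    /*
     * Sketch of how the stub above is reached from C++ (illustrative; parameter types are
     * simplified). It matches the register/stack contract documented in the entry comment:
     *
     *   extern "C" void art_quick_invoke_stub(void* method, uint32_t* args, uint32_t args_size,
     *                                         Thread* self, JValue* result, const char* shorty);
     *
     * r0-r3 carry the first four parameters; the JValue* result and the shorty arrive on the
     * stack at [sp] and [sp, #4] on entry (hence the [sp, #24] load after the 24-byte spill area
     * is rebuilt).
     */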

    /*
     * On entry r0 is uint32_t* gprs_ and r1 is uint32_t* fprs_
     */
ARM_ENTRY art_quick_do_long_jump
    vldm r1, {s0-s31}     @ load all fprs from argument fprs_
    ldr  r2, [r0, #60]    @ r2 = r15 (PC from gprs_ 60=4*15)
    ldr  r14, [r0, #56]   @ (LR from gprs_ 56=4*14)
    add  r0, r0, #12      @ increment r0 to skip gprs_[0..2] 12=4*3
    ldm  r0, {r3-r13}     @ load remaining gprs from argument gprs_
    mov  r0, #0           @ clear result registers r0 and r1
    mov  r1, #0
    bx   r2               @ do long jump
END art_quick_do_long_jump

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure.
     */
    .extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case exception allocation triggers GC
    mov    r2, r9                          @ pass Thread::Current
    mov    r3, sp                          @ pass SP
    bl     artHandleFillArrayDataFromCode  @ (Array*, const DexFile::Payload*, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_handle_fill_data

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. r0 holds the
     * possibly null object to lock.
     */
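    /*
     * Lock word layout assumed by the fast paths below (sketch, reconstructed from the bit
     * operations in the code rather than from the C++ definition):
     *   bits 31-30: state; both zero means unlocked or thin-locked
     *   bits 29-16: recursive lock count (incremented by adding #65536)
     *   bits 15-0:  owner thread id (compared via "uxth" against THREAD_ID_OFFSET)
     * A zero lock word means unlocked; a non-zero word with a matching owner id is a recursive
     * acquisition; anything else falls back to the C++ slow path.
     */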
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
    cbz    r0, .Lslow_lock
.Lretry_lock:
    ldr    r2, [r9, #THREAD_ID_OFFSET]
    ldrex  r1, [r0, #LOCK_WORD_OFFSET]
    cbnz   r1, .Lnot_unlocked         @ already thin locked
    @ unlocked case - r2 holds thread id with count of 0
    strex  r3, r2, [r0, #LOCK_WORD_OFFSET]
    cbnz   r3, .Lstrex_fail           @ store failed, retry
    dmb    ish                        @ full (LoadLoad|LoadStore) memory barrier
    bx lr
.Lstrex_fail:
    b .Lretry_lock                    @ unlikely forward branch, need to reload and recheck r1/r2
.Lnot_unlocked:
    lsr    r3, r1, 30
    cbnz   r3, .Lslow_lock            @ if either of the top two bits are set, go slow path
    eor    r2, r1, r2                 @ lock_word.ThreadId() ^ self->ThreadId()
    uxth   r2, r2                     @ zero top 16 bits
    cbnz   r2, .Lslow_lock            @ thread ids differ, so the lock is contended: go slow path
                                      @ else we already hold the lock -> recursive thin lock
    add    r2, r1, #65536             @ increment count in lock word placing in r2 for storing
    lsr    r1, r2, 30                 @ if either of the top two bits are set, we overflowed.
    cbnz   r1, .Lslow_lock            @ if we overflow the count go slow path
    str    r2, [r0, #LOCK_WORD_OFFSET] @ no need for strex as we hold the lock
    bx lr
.Lslow_lock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case we block
    mov    r1, r9                     @ pass Thread::Current
    mov    r2, sp                     @ pass SP
    bl     artLockObjectFromCode      @ (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_lock_object

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * r0 holds the possibly null object to unlock.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz    r0, .Lslow_unlock
    ldr    r1, [r0, #LOCK_WORD_OFFSET]
    lsr    r2, r1, 30
    cbnz   r2, .Lslow_unlock          @ if either of the top two bits are set, go slow path
    ldr    r2, [r9, #THREAD_ID_OFFSET]
    eor    r3, r1, r2                 @ lock_word.ThreadId() ^ self->ThreadId()
    uxth   r3, r3                     @ zero top 16 bits
    cbnz   r3, .Lslow_unlock          @ lock word and self thread id differ, go slow path
    cmp    r1, #65536
    bpl    .Lrecursive_thin_unlock
    @ transition to unlocked, r3 holds 0
    dmb    ish                        @ full (LoadStore|StoreStore) memory barrier
    str    r3, [r0, #LOCK_WORD_OFFSET]
    bx     lr
.Lrecursive_thin_unlock:
    sub    r1, r1, #65536
    str    r1, [r0, #LOCK_WORD_OFFSET]
    bx     lr
.Lslow_unlock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case exception allocation triggers GC
    mov    r1, r9                     @ pass Thread::Current
    mov    r2, sp                     @ pass SP
    bl     artUnlockObjectFromCode    @ (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    push {r0-r1, lr}                    @ save arguments, link register and pad
    .save {r0-r1, lr}
    .cfi_adjust_cfa_offset 12
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r1, 4
    .cfi_rel_offset lr, 8
    sub sp, #4
    .pad #4
    .cfi_adjust_cfa_offset 4
    bl artIsAssignableFromCode
    cbz    r0, .Lthrow_class_cast_exception
    add sp, #4
    .cfi_adjust_cfa_offset -4
    pop {r0-r1, pc}
.Lthrow_class_cast_exception:
    add sp, #4
    .cfi_adjust_cfa_offset -4
    pop {r0-r1, lr}
    .cfi_restore r0
    .cfi_restore r1
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r2, r9                      @ pass Thread::Current
    mov r3, sp                      @ pass SP
    b   artThrowClassCastException  @ (Class*, Class*, Thread*, SP)
    bkpt
END art_quick_check_cast

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * r0 = array, r1 = index, r2 = value
     */
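    /*
     * Pseudo-code sketch of the three entry points below (illustrative only, reconstructed from
     * the assembly; it is not part of the build):
     *
     *   if (array == null) throw NullPointerException;
     *   if (index >= array->length) throw ArrayIndexOutOfBoundsException(index, length);
     *   if (value == null || value->klass is assignable to array->klass->component_type) {
     *       array->data[index] = value;
     *       if (value != null) card_table[array >> 7] = dirty;   // write barrier
     *   } else {
     *       throw ArrayStoreException;
     *   }
     */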
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst r0, r0
    bne art_quick_aput_obj_with_bound_check
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr r3, [r0, #ARRAY_LENGTH_OFFSET]
    cmp r3, r1
    bhi art_quick_aput_obj
    mov r0, r1
    mov r1, r3
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check

ENTRY art_quick_aput_obj
    cbz r2, .Ldo_aput_null
    ldr r3, [r0, #CLASS_OFFSET]
    ldr ip, [r2, #CLASS_OFFSET]
    ldr r3, [r3, #CLASS_COMPONENT_TYPE_OFFSET]
    cmp r3, ip  @ value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add r3, r0, #OBJECT_ARRAY_DATA_OFFSET
    str r2, [r3, r1, lsl #2]
    ldr r3, [r9, #THREAD_CARD_TABLE_OFFSET]
    lsr r0, r0, #7
    strb r3, [r3, r0]
    blx lr
.Ldo_aput_null:
    add r3, r0, #OBJECT_ARRAY_DATA_OFFSET
    str r2, [r3, r1, lsl #2]
    blx lr
.Lcheck_assignability:
    push {r0-r2, lr}             @ save arguments
    .save {r0-r2, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r1, 4
    .cfi_rel_offset r2, 8
    .cfi_rel_offset lr, 12
    mov r1, ip
    mov r0, r3
    bl artIsAssignableFromCode
    cbz r0, .Lthrow_array_store_exception
    pop {r0-r2, lr}
    .cfi_restore r0
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore lr
    .cfi_adjust_cfa_offset -16
    add r3, r0, #OBJECT_ARRAY_DATA_OFFSET
    str r2, [r3, r1, lsl #2]
    ldr r3, [r9, #THREAD_CARD_TABLE_OFFSET]
    lsr r0, r0, #7
    strb r3, [r3, r0]
    blx lr
.Lthrow_array_store_exception:
    pop {r0-r2, lr}
    .cfi_restore r0
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore lr
    .cfi_adjust_cfa_offset -16
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov r1, r2
    mov r2, r9                   @ pass Thread::Current
    mov r3, sp                   @ pass SP
    b artThrowArrayStoreException  @ (Class*, Class*, Thread*, SP)
    bkpt                         @ unreached
END art_quick_aput_obj

    /*
     * Entry from managed code when static storage is uninitialized; this stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned.
     */
    .extern artInitializeStaticStorageFromCode
ENTRY art_quick_initialize_static_storage
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
    mov    r2, r9                              @ pass Thread::Current
    mov    r3, sp                              @ pass SP
    @ artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*, SP)
    bl     artInitializeStaticStorageFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_initialize_static_storage

    /*
     * Entry from managed code when the dex cache misses for a type_idx.
     */
    .extern artInitializeTypeFromCode
ENTRY art_quick_initialize_type
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
    mov    r2, r9                              @ pass Thread::Current
    mov    r3, sp                              @ pass SP
    @ artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*, SP)
    bl     artInitializeTypeFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_initialize_type

    /*
     * Entry from managed code when type_idx needs to be checked for access and the dex cache may
     * also miss.
     */
    .extern artInitializeTypeAndVerifyAccessFromCode
ENTRY art_quick_initialize_type_and_verify_access
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
    mov    r2, r9                              @ pass Thread::Current
    mov    r3, sp                              @ pass SP
    @ artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx, Method* referrer, Thread*, SP)
    bl     artInitializeTypeAndVerifyAccessFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_initialize_type_and_verify_access

    /*
     * Called by managed code to resolve a static field and load a 32-bit primitive value.
     */
    .extern artGet32StaticFromCode
ENTRY art_quick_get32_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r2, r9                        @ pass Thread::Current
    mov    r3, sp                        @ pass SP
    bl     artGet32StaticFromCode        @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ success if no exception pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get32_static

    /*
     * Called by managed code to resolve a static field and load a 64-bit primitive value.
     */
    .extern artGet64StaticFromCode
ENTRY art_quick_get64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r2, r9                        @ pass Thread::Current
    mov    r3, sp                        @ pass SP
    bl     artGet64StaticFromCode        @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
    ldr    r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r2, 1f                        @ success if no exception pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get64_static

    /*
     * Called by managed code to resolve a static field and load an object reference.
     */
    .extern artGetObjStaticFromCode
ENTRY art_quick_get_obj_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r2, r9                        @ pass Thread::Current
    mov    r3, sp                        @ pass SP
    bl     artGetObjStaticFromCode       @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ success if no exception pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get_obj_static

    /*
     * Called by managed code to resolve an instance field and load a 32-bit primitive value.
     */
    .extern artGet32InstanceFromCode
ENTRY art_quick_get32_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    bl     artGet32InstanceFromCode      @ (field_idx, Object*, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ success if no exception pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get32_instance

    /*
     * Called by managed code to resolve an instance field and load a 64-bit primitive value.
     */
    .extern artGet64InstanceFromCode
ENTRY art_quick_get64_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artGet64InstanceFromCode      @ (field_idx, Object*, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    ldr    r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r2, 1f                        @ success if no exception pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get64_instance

    /*
     * Called by managed code to resolve an instance field and load an object reference.
     */
    .extern artGetObjInstanceFromCode
ENTRY art_quick_get_obj_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artGetObjInstanceFromCode     @ (field_idx, Object*, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ success if no exception pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get_obj_instance

    /*
     * Called by managed code to resolve a static field and store a 32-bit primitive value.
     */
    .extern artSet32StaticFromCode
ENTRY art_quick_set32_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artSet32StaticFromCode        @ (field_idx, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_set32_static

    /*
     * Called by managed code to resolve a static field and store a 64-bit primitive value.
     * On entry r0 holds field index, r1:r2 hold new_val
     */
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    mov    r3, r2                        @ pass one half of wide argument
    mov    r2, r1                        @ pass other half of wide argument
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    bl     artSet64StaticFromCode        @ (field_idx, referrer, new_val, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_set64_static

    /*
     * Called by managed code to resolve a static field and store an object reference.
     */
    .extern artSetObjStaticFromCode
ENTRY art_quick_set_obj_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artSetObjStaticFromCode       @ (field_idx, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_set_obj_static

    /*
     * Called by managed code to resolve an instance field and store a 32-bit primitive value.
     */
    .extern artSet32InstanceFromCode
ENTRY art_quick_set32_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r3, [sp, #32]                 @ pass referrer
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    .cfi_rel_offset r12, 4
    bl     artSet32InstanceFromCode      @ (field_idx, Object*, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_set32_instance

    /*
     * Called by managed code to resolve an instance field and store a 64-bit primitive value.
     */
    .extern artSet64InstanceFromCode
ENTRY art_quick_set64_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    bl     artSet64InstanceFromCode      @ (field_idx, Object*, new_val, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_set64_instance

    /*
     * Called by managed code to resolve an instance field and store an object reference.
     */
    .extern artSetObjInstanceFromCode
ENTRY art_quick_set_obj_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r3, [sp, #32]                 @ pass referrer
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    bl     artSetObjInstanceFromCode     @ (field_idx, Object*, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_set_obj_instance

    /*
     * Entry from managed code to resolve a string; this stub will allocate a String and deliver an
     * exception on error. On success the String is returned. R0 holds the referring method,
     * R1 holds the string index. The fast-path check for a hit in the strings cache has already
     * been performed.
     */
    .extern artResolveStringFromCode
ENTRY art_quick_resolve_string
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
    mov    r2, r9                     @ pass Thread::Current
    mov    r3, sp                     @ pass SP
    @ artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*, SP)
    bl     artResolveStringFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_resolve_string

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
    mov    r2, r9                     @ pass Thread::Current
    mov    r3, sp                     @ pass SP
    bl     \entrypoint     @ (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
    mov    r3, r9                     @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!           @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    @ (uint32_t type_idx, Method* method, int32_t component_count, Thread*, SP)
    bl     \entrypoint
    add    sp, #16                    @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

    /*
     * Called by managed code when the value in rSUSPEND has been decremented to 0.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
#ifdef ARM_R4_SUSPEND_FLAG
    ldrh    r0, [rSELF, #THREAD_FLAGS_OFFSET]
    mov    rSUSPEND, #SUSPEND_CHECK_INTERVAL  @ reset rSUSPEND to SUSPEND_CHECK_INTERVAL
    cbnz   r0, 1f                             @ check Thread::Current()->suspend_count_ == 0
    bx     lr                                 @ return if suspend_count_ == 0
1:
#endif
    mov    r0, rSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME          @ save callee saves for stack crawl
    mov    r1, sp
    bl     artTestSuspendFromCode             @ (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_test_suspend

ENTRY art_quick_implicit_suspend
    mov    r0, rSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME          @ save callee saves for stack crawl
    mov    r1, sp
    bl     artTestSuspendFromCode             @ (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_implicit_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * r0 holds the proxy method and r1 holds the receiver; r2 and r3 may contain arguments. The
     * frame size of the invoked proxy method agrees with a ref and args callee save frame.
     */
     .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str     r0, [sp, #0]           @ place proxy method at bottom of frame
    mov     r2, r9                 @ pass Thread::Current
    mov     r3, sp                 @ pass SP
    blx     artQuickProxyInvokeHandler  @ (Method* proxy method, receiver, Thread*, SP)
    ldr     r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    add     sp, #16                @ skip r1-r3, 4 bytes padding.
    .cfi_adjust_cfa_offset -16
    cbnz    r2, 1f                 @ success if no exception is pending
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    bx      lr                     @ return on success
1:
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict. r12 is a hidden argument that holds the target method's
     * dex method index.
     */
ENTRY art_quick_imt_conflict_trampoline
    ldr    r0, [sp, #0]            @ load caller Method*
    ldr    r0, [r0, #METHOD_DEX_CACHE_METHODS_OFFSET]  @ load dex_cache_resolved_methods
    add    r0, #OBJECT_ARRAY_DATA_OFFSET  @ get starting address of data
    ldr    r0, [r0, r12, lsl 2]    @ load the target method
    b art_quick_invoke_interface_trampoline
END art_quick_imt_conflict_trampoline

    .extern artQuickResolutionTrampoline
ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov     r2, r9                 @ pass Thread::Current
    mov     r3, sp                 @ pass SP
    blx     artQuickResolutionTrampoline  @ (Method* called, receiver, Thread*, SP)
    cbz     r0, 1f                 @ is code pointer null? goto exception
    mov     r12, r0
    ldr  r0, [sp, #0]              @ load resolved method in r0
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    bx      r12                    @ tail-call into actual code
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

    /*
     * Called to do a generic JNI down-call
     */
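    /*
     * Flow sketch for the trampoline below (a summary of its inline comments, kept here for
     * orientation; it is not additional code):
     *   1. Build the kRefsAndArgs frame and store the native ArtMethod* at the bottom.
     *   2. Reserve a large scratch area and call artQuickGenericJniTrampoline(Thread*, SP); on
     *      success it returns the native code pointer (r0) and the adjusted SP (r1).
     *   3. Pop the marshalled arguments into r0-r3 and call the native code.
     *   4. Call artQuickGenericJniEndTrampoline(Thread*, result, result_f) to convert the return
     *      value, then check for a pending exception and either return or deliver it.
     */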
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str r0, [sp, #0]  // Store native ArtMethod* to bottom of stack.

    // Save rSELF
    mov r11, rSELF
    // Save SP, so we can have static CFI info. r10 is saved in ref_and_args.
    mov r10, sp
    .cfi_def_cfa_register r10

    sub sp, sp, #5120

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*,  SP)
    //    r0      r1   <= C calling convention
    //  rSELF     r10  <= where they are

    mov r0, rSELF   // Thread*
    mov r1, r10
    blx artQuickGenericJniTrampoline  // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    // r0: pointer to native code, 0 on error.
    // r1: pointer to the bottom of the used area of the alloca, can restore stack till there.

    // Check for error = 0.
    cbz r0, .Lentry_error

    // Release part of the alloca.
    mov sp, r1

    // Save the code pointer
    mov r12, r0

    // Load parameters from frame into registers.
    pop {r0-r3}

    // Softfloat.
    // TODO: Change to hardfloat when supported.

    blx r12           // native call.

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, result, result_f)
    //    r0      r1,r2    r3,stack       <= C calling convention
    //    r11     r0,r1    r0,r1          <= where they are
    sub sp, sp, #12 // Stack alignment.

    push {r1}
    mov r3, r0
    mov r2, r1
    mov r1, r0
    mov r0, r11

    blx artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, r10
    .cfi_def_cfa_register sp

    // Restore self pointer.
    mov r9, r11

    // Pending exceptions possible.
    ldr r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    cbnz r2, .Lexception_in_native

    // Tear down the callee-save frame.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    bx lr      // ret

.Lentry_error:
    mov sp, r10
    .cfi_def_cfa_register sp
    mov r9, r11
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline

    .extern artQuickToInterpreterBridge
ENTRY art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov     r1, r9                 @ pass Thread::Current
    mov     r2, sp                 @ pass SP
    blx     artQuickToInterpreterBridge    @ (Method* method, Thread*, SP)
    ldr     r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    add     sp, #16                @ skip r1-r3, 4 bytes padding.
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz    r2, 1f                 @ success if no exception is pending
    bx    lr                       @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

    /*
     * Routine that intercepts method calls and returns.
     */
    .extern artInstrumentationMethodEntryFromCode
    .extern artInstrumentationMethodExitFromCode
ENTRY art_quick_instrumentation_entry
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str   r0, [sp, #4]     @ preserve r0
    mov   r12, sp          @ remember sp
    str   lr, [sp, #-16]!  @ expand the frame and pass LR
    .pad #16
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset lr, 0
    mov   r2, r9         @ pass Thread::Current
    mov   r3, r12        @ pass SP
    blx   artInstrumentationMethodEntryFromCode  @ (Method*, Object*, Thread*, SP, LR)
    add   sp, #16        @ remove out argument and padding from stack
    .cfi_adjust_cfa_offset -16
    mov   r12, r0        @ r12 holds reference to code
    ldr   r0, [sp, #4]   @ restore r0
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    blx   r12            @ call method with lr set to art_quick_instrumentation_exit
END art_quick_instrumentation_entry
    .type art_quick_instrumentation_exit, #function
    .global art_quick_instrumentation_exit
art_quick_instrumentation_exit:
    .cfi_startproc
    .fnstart
    mov   lr, #0         @ link register is to here, so clobber with 0 for later checks
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    mov   r12, sp        @ remember bottom of caller's frame
    push  {r0-r1}        @ save return value
    .save {r0-r1}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r1, 4
    sub   sp, #8         @ space for return value argument
    .pad #8
    .cfi_adjust_cfa_offset 8
    strd r0, [sp]        @ r0/r1 -> [sp] for fpr_res
    mov   r2, r0         @ pass return value as gpr_res
    mov   r3, r1
    mov   r0, r9         @ pass Thread::Current
    mov   r1, r12        @ pass SP
    blx   artInstrumentationMethodExitFromCode  @ (Thread*, SP, gpr_res, fpr_res)
    add   sp, #8
    .cfi_adjust_cfa_offset -8

    mov   r2, r0         @ link register saved by instrumentation
    mov   lr, r1         @ r1 is holding link register if we're to bounce to deoptimize
    pop   {r0, r1}       @ restore return value
    .cfi_restore r0
    .cfi_restore r1
    add sp, #32          @ remove callee save frame
    .cfi_adjust_cfa_offset -32
    bx    r2             @ return
END art_quick_instrumentation_exit

    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimize
ENTRY art_quick_deoptimize
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov    r0, r9         @ Set up args.
    mov    r1, sp
    blx    artDeoptimize  @ artDeoptimize(Thread*, SP)
END art_quick_deoptimize

    /*
     * Signed 64-bit integer multiply.
     *
     * Consider WXxYZ (r1r0 x r3r2) with a long multiply:
     *        WX
     *      x YZ
     *  --------
     *     ZW ZX
     *  YW YX
     *
     * The low word of the result holds ZX, the high word holds
     * (ZW+YX) + (the high overflow from ZX).  YW doesn't matter because
     * it doesn't fit in the low 64 bits.
     *
     * Unlike most ARM math operations, multiply instructions have
     * restrictions on using the same register more than once (Rd and Rm
     * cannot be the same).
     */
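    /*
     * Worked form of the product computed below, writing the operands as W:X (r1:r0) and
     * Y:Z (r3:r2):
     *   (2^32*W + X) * (2^32*Y + Z) = 2^64*W*Y + 2^32*(W*Z + X*Y) + X*Z
     * Only the low 64 bits are kept, so the W*Y term drops out:
     *   result.lo = lo(X*Z)
     *   result.hi = hi(X*Z) + W*Z + X*Y        (mod 2^32)
     * which is exactly the umull/mul/mla/add sequence that follows.
     */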
    /* mul-long vAA, vBB, vCC */
ENTRY art_quick_mul_long
    push    {r9 - r10}
    .save {r9 - r10}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    .cfi_rel_offset r10, 4
    mul     ip, r2, r1                  @  ip<- ZxW
    umull   r9, r10, r2, r0             @  r9/r10 <- ZxX
    mla     r2, r0, r3, ip              @  r2<- YxX + (ZxW)
    add     r10, r2, r10                @  r10<- r10 + low(ZxW + (YxX))
    mov     r0,r9
    mov     r1,r10
    pop     {r9 - r10}
    .cfi_adjust_cfa_offset -8
    .cfi_restore r9
    .cfi_restore r10
    bx      lr
END art_quick_mul_long

    /*
     * Long integer shift.  This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
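    /*
     * Worked example for the shift-count handling below (shift amount s already masked to
     * 0..63). For s < 32 the result is
     *   hi = (hi << s) | (lo >> (32 - s)),   lo = lo << s
     * and for s >= 32 the "subs ip, r2, #32" goes non-negative, so the movpl overwrites hi with
     *   hi = lo << (s - 32),                 lo = lo << s  (== 0, since s >= 32)
     * e.g. 0x00000001_00000001 << 33 gives hi = 0x00000002, lo = 0. The shr/ushr variants below
     * mirror this with arithmetic/logical right shifts.
     */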
    /* shl-long vAA, vBB, vCC */
ARM_ENTRY art_quick_shl_long            @ ARM code as thumb code requires spills
    and     r2, r2, #63                 @ r2<- r2 & 0x3f
    mov     r1, r1, asl r2              @  r1<- r1 << r2
    rsb     r3, r2, #32                 @  r3<- 32 - r2
    orr     r1, r1, r0, lsr r3          @  r1<- r1 | (r0 >> (32-r2))
    subs    ip, r2, #32                 @  ip<- r2 - 32
    movpl   r1, r0, asl ip              @  if r2 >= 32, r1<- r0 << (r2-32)
    mov     r0, r0, asl r2              @  r0<- r0 << r2
    bx      lr
END art_quick_shl_long

    /*
     * Long integer shift.  This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* shr-long vAA, vBB, vCC */
ARM_ENTRY art_quick_shr_long            @ ARM code as thumb code requires spills
    and     r2, r2, #63                 @ r2<- r2 & 0x3f
    mov     r0, r0, lsr r2              @  r0<- r0 >> r2
    rsb     r3, r2, #32                 @  r3<- 32 - r2
    orr     r0, r0, r1, asl r3          @  r0<- r0 | (r1 << (32-r2))
    subs    ip, r2, #32                 @  ip<- r2 - 32
    movpl   r0, r1, asr ip              @  if r2 >= 32, r0<-r1 >> (r2-32)
    mov     r1, r1, asr r2              @  r1<- r1 >> r2
    bx      lr
END art_quick_shr_long

    /*
     * Long integer shift.  This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* ushr-long vAA, vBB, vCC */
ARM_ENTRY art_quick_ushr_long           @ ARM code as thumb code requires spills
    and     r2, r2, #63                 @ r2<- r2 & 0x3f
    mov     r0, r0, lsr r2              @  r0<- r0 >>> r2
    rsb     r3, r2, #32                 @  r3<- 32 - r2
    orr     r0, r0, r1, asl r3          @  r0<- r0 | (r1 << (32-r2))
    subs    ip, r2, #32                 @  ip<- r2 - 32
    movpl   r0, r1, lsr ip              @  if r2 >= 32, r0<-r1 >>> (r2-32)
    mov     r1, r1, lsr r2              @  r1<- r1 >>> r2
    bx      lr
END art_quick_ushr_long

    /*
     * String's indexOf.
     *
     * On entry:
     *    r0:   string object (known non-null)
     *    r1:   char to match (known <= 0xFFFF)
     *    r2:   Starting offset in string data
     */
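    /*
     * Result computation sketch for the match labels below: r12 keeps the address of the first
     * character and the loop walks r0 with pre-indexed ldrh, so on a hit in unroll slot k the
     * matching character sits at r0 - 2*(3 - k). Each .Lmatch_N therefore rewinds r0 by the
     * appropriate number of bytes, subtracts r12, and halves the byte distance to produce a
     * character index. The String layout (count/offset/value fields, 16-bit chars at
     * STRING_DATA_OFFSET) is taken from the asm_support offsets used in the code.
     */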
ENTRY art_quick_indexof
    push {r4, r10-r11, lr} @ 4 words of callee saves
    .save {r4, r10-r11, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r10, 4
    .cfi_rel_offset r11, 8
    .cfi_rel_offset lr, 12
    ldr   r3, [r0, #STRING_COUNT_OFFSET]
    ldr   r12, [r0, #STRING_OFFSET_OFFSET]
    ldr   r0, [r0, #STRING_VALUE_OFFSET]

    /* Clamp start to [0..count] */
    cmp   r2, #0
    it    lt
    movlt r2, #0
    cmp   r2, r3
    it    gt
    movgt r2, r3

    /* Build a pointer to the start of string data */
    add   r0, #STRING_DATA_OFFSET
    add   r0, r0, r12, lsl #1

    /* Save a copy in r12 to later compute result */
    mov   r12, r0

    /* Build pointer to start of data to compare and pre-bias */
    add   r0, r0, r2, lsl #1
    sub   r0, #2

    /* Compute iteration count */
    sub   r2, r3, r2

    /*
     * At this point we have:
     *   r0: start of data to test
     *   r1: char to compare
     *   r2: iteration count
     *   r12: original start of string data
     *   r3, r4, r10, r11 available for loading string data
     */

    subs  r2, #4
    blt   .Lindexof_remainder

.Lindexof_loop4:
    ldrh  r3, [r0, #2]!
    ldrh  r4, [r0, #2]!
    ldrh  r10, [r0, #2]!
    ldrh  r11, [r0, #2]!
    cmp   r3, r1
    beq   .Lmatch_0
    cmp   r4, r1
    beq   .Lmatch_1
    cmp   r10, r1
    beq   .Lmatch_2
    cmp   r11, r1
    beq   .Lmatch_3
    subs  r2, #4
    bge   .Lindexof_loop4

.Lindexof_remainder:
    adds  r2, #4
    beq   .Lindexof_nomatch

.Lindexof_loop1:
    ldrh  r3, [r0, #2]!
    cmp   r3, r1
    beq   .Lmatch_3
    subs  r2, #1
    bne   .Lindexof_loop1

.Lindexof_nomatch:
    mov   r0, #-1
    pop {r4, r10-r11, pc}

.Lmatch_0:
    sub   r0, #6
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
.Lmatch_1:
    sub   r0, #4
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
.Lmatch_2:
    sub   r0, #2
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
.Lmatch_3:
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
END art_quick_indexof

    /*
     * String's compareTo.
     *
     * Requires rARG0/rARG1 to have been previously checked for null.  Will
     * return a negative value if this is < comp, 0 if they are the same and
     * a positive value if this is > comp.
     *
     * On entry:
     *    r0:   this object pointer
     *    r1:   comp object pointer
     *
     */
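    /*
     * Semantics sketch of the routine below (equivalent Java-like pseudo-code, illustrative
     * only):
     *
     *   int n = min(this.count, comp.count);
     *   for (int i = 0; i < n; ++i) {
     *       int d = this.charAt(i) - comp.charAt(i);
     *       if (d != 0) return d;
     *   }
     *   return this.count - comp.count;
     *
     * The assembly additionally short-circuits identical object pointers and hands long
     * comparisons (more than ~28 remaining chars) to __memcmp16.
     */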
    .extern __memcmp16
ENTRY art_quick_string_compareto
    mov    r2, r0         @ this to r2, opening up r0 for return value
    sub    r0, r2, r1     @ r0<- 0 iff this and comp are the same object
    cbnz   r0, 1f         @ different objects, compare contents
    bx     lr             @ same object: the strings are equal, return 0
1:

    push {r4, r7-r12, lr} @ 8 words - keep alignment
    .save {r4, r7-r12, lr}
    .cfi_adjust_cfa_offset 32
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r7, 4
    .cfi_rel_offset r8, 8
    .cfi_rel_offset r9, 12
    .cfi_rel_offset r10, 16
    .cfi_rel_offset r11, 20
    .cfi_rel_offset r12, 24
    .cfi_rel_offset lr, 28

    ldr    r4, [r2, #STRING_OFFSET_OFFSET]
    ldr    r9, [r1, #STRING_OFFSET_OFFSET]
    ldr    r7, [r2, #STRING_COUNT_OFFSET]
    ldr    r10, [r1, #STRING_COUNT_OFFSET]
    ldr    r2, [r2, #STRING_VALUE_OFFSET]
    ldr    r1, [r1, #STRING_VALUE_OFFSET]

    /*
     * At this point, we have:
     *    value:  r2/r1
     *    offset: r4/r9
     *    count:  r7/r10
     * We're going to compute
     *    r11 <- countDiff
     *    r10 <- minCount
     */
     subs  r11, r7, r10
     it    ls
     movls r10, r7

     /* Now, build pointers to the string data */
     add   r2, r2, r4, lsl #1
     add   r1, r1, r9, lsl #1
     /*
      * Note: data pointers point to previous element so we can use pre-index
      * mode with base writeback.
      */
     add   r2, #STRING_DATA_OFFSET-2   @ offset to contents[-1]
     add   r1, #STRING_DATA_OFFSET-2   @ offset to contents[-1]

     /*
      * At this point we have:
      *   r2: *this string data
      *   r1: *comp string data
      *   r10: iteration count for comparison
      *   r11: value to return if the first part of the string is equal
      *   r0: reserved for result
      *   r3, r4, r7, r8, r9, r12 available for loading string data
      */

    subs  r10, #2
    blt   .Ldo_remainder2

      /*
       * Unroll the first two checks so we can quickly catch early mismatch
       * on long strings (but preserve incoming alignment)
       */

    ldrh  r3, [r2, #2]!
    ldrh  r4, [r1, #2]!
    ldrh  r7, [r2, #2]!
    ldrh  r8, [r1, #2]!
    subs  r0, r3, r4
    it    eq
    subseq  r0, r7, r8
    bne   .Ldone
    cmp   r10, #28
    bgt   .Ldo_memcmp16
    subs  r10, #3
    blt   .Ldo_remainder

.Lloopback_triple:
    ldrh  r3, [r2, #2]!
    ldrh  r4, [r1, #2]!
    ldrh  r7, [r2, #2]!
    ldrh  r8, [r1, #2]!
    ldrh  r9, [r2, #2]!
    ldrh  r12,[r1, #2]!
    subs  r0, r3, r4
    it    eq
    subseq  r0, r7, r8
    it    eq
    subseq  r0, r9, r12
    bne   .Ldone
    subs  r10, #3
    bge   .Lloopback_triple

.Ldo_remainder:
    adds  r10, #3
    beq   .Lreturn_diff

.Lloopback_single:
    ldrh  r3, [r2, #2]!
    ldrh  r4, [r1, #2]!
    subs  r0, r3, r4
    bne   .Ldone
    subs  r10, #1
    bne   .Lloopback_single

.Lreturn_diff:
    mov   r0, r11
    pop   {r4, r7-r12, pc}

.Ldo_remainder2:
    adds  r10, #2
    bne   .Lloopback_single
    mov   r0, r11
    pop   {r4, r7-r12, pc}

    /* Long string case */
.Ldo_memcmp16:
    mov   r7, r11
    add   r0, r2, #2
    add   r1, r1, #2
    mov   r2, r10
    bl    __memcmp16
    cmp   r0, #0
    it    eq
    moveq r0, r7
.Ldone:
    pop   {r4, r7-r12, pc}
END art_quick_string_compareto