/* quick_entrypoints_arm.S revision af6e67a4816d2593586115b89faa659225363246 */
1/* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "asm_support.h" 18 19 /* Deliver the given exception */ 20 .extern artDeliverExceptionFromCode 21 /* Deliver an exception pending on a thread */ 22 .extern artDeliverPendingException 23 24 /* Cache alignment for function entry */ 25.macro ALIGN_FUNCTION_ENTRY 26 .balign 16 27.endm 28 29 /* 30 * Macro that sets up the callee save frame to conform with 31 * Runtime::CreateCalleeSaveMethod(kSaveAll) 32 */ 33.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME 34 push {r4-r11, lr} @ 9 words of callee saves 35 vpush {s0-s31} 36 sub sp, #12 @ 3 words of space, bottom word will hold Method* 37.endm 38 39 /* 40 * Macro that sets up the callee save frame to conform with 41 * Runtime::CreateCalleeSaveMethod(kRefsOnly). Restoration assumes non-moving GC. 42 */ 43.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME 44 push {r5-r8, r10-r11, lr} @ 7 words of callee saves 45 sub sp, #4 @ bottom word will hold Method* 46.endm 47 48.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 49 ldr lr, [sp, #28] @ restore lr for return 50 add sp, #32 @ unwind stack 51.endm 52 53.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN 54 ldr lr, [sp, #28] @ restore lr for return 55 add sp, #32 @ unwind stack 56 bx lr @ return 57.endm 58 59 /* 60 * Macro that sets up the callee save frame to conform with 61 * Runtime::CreateCalleeSaveMethod(kRefsAndArgs). Restoration assumes non-moving GC. 
62 */ 63.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME 64 push {r1-r3, r5-r8, r10-r11, lr} @ 10 words of callee saves 65 sub sp, #8 @ 2 words of space, bottom word will hold Method* 66.endm 67 68.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME 69 ldr r1, [sp, #8] @ restore non-callee save r1 70 ldrd r2, [sp, #12] @ restore non-callee saves r2-r3 71 ldr lr, [sp, #44] @ restore lr 72 add sp, #48 @ rewind sp 73.endm 74 75 /* 76 * Macro that set calls through to artDeliverPendingExceptionFromCode, where the pending 77 * exception is Thread::Current()->exception_ 78 */ 79.macro DELIVER_PENDING_EXCEPTION 80 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME @ save callee saves for throw 81 mov r0, r9 @ pass Thread::Current 82 mov r1, sp @ pass SP 83 b artDeliverPendingExceptionFromCode @ artDeliverPendingExceptionFromCode(Thread*, SP) 84.endm 85 86.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name 87 .global \c_name 88 .extern \cxx_name 89 ALIGN_FUNCTION_ENTRY 90\c_name: 91 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context 92 mov r0, r9 @ pass Thread::Current 93 mov r1, sp @ pass SP 94 b \cxx_name @ \cxx_name(Thread*, SP) 95.endm 96 97.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name 98 .global \c_name 99 .extern \cxx_name 100 ALIGN_FUNCTION_ENTRY 101\c_name: 102 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context 103 mov r1, r9 @ pass Thread::Current 104 mov r2, sp @ pass SP 105 b \cxx_name @ \cxx_name(Thread*, SP) 106.endm 107 108.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name 109 .global \c_name 110 .extern \cxx_name 111 ALIGN_FUNCTION_ENTRY 112\c_name: 113 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context 114 mov r2, r9 @ pass Thread::Current 115 mov r3, sp @ pass SP 116 b \cxx_name @ \cxx_name(Thread*, SP) 117.endm 118 119 /* 120 * Called by managed code, saves callee saves and then calls artThrowException 121 * that will place a mock Method* at the bottom of the stack. 
Arg1 holds the exception. 122 */ 123ONE_ARG_RUNTIME_EXCEPTION art_deliver_exception_from_code, artDeliverExceptionFromCode 124 125 /* 126 * Called by managed code to create and deliver a NullPointerException. 127 */ 128NO_ARG_RUNTIME_EXCEPTION art_throw_null_pointer_exception_from_code, artThrowNullPointerExceptionFromCode 129 130 /* 131 * Called by managed code to create and deliver an ArithmeticException. 132 */ 133NO_ARG_RUNTIME_EXCEPTION art_throw_div_zero_from_code, artThrowDivZeroFromCode 134 135 /* 136 * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds 137 * index, arg2 holds limit. 138 */ 139TWO_ARG_RUNTIME_EXCEPTION art_throw_array_bounds_from_code, artThrowArrayBoundsFromCode 140 141 /* 142 * Called by managed code to create and deliver a StackOverflowError. 143 */ 144NO_ARG_RUNTIME_EXCEPTION art_throw_stack_overflow_from_code, artThrowStackOverflowFromCode 145 146 /* 147 * Called by managed code to create and deliver a NoSuchMethodError. 148 */ 149ONE_ARG_RUNTIME_EXCEPTION art_throw_no_such_method_from_code, artThrowNoSuchMethodFromCode 150 151 /* 152 * All generated callsites for interface invokes and invocation slow paths will load arguments 153 * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain 154 * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the 155 * stack and call the appropriate C helper. 156 * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1. 157 * 158 * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting 159 * of the target Method* in r0 and method->code_ in r1. 160 * 161 * If unsuccessful, the helper will return NULL/NULL. There will bea pending exception in the 162 * thread and we branch to another stub to deliver it. 
163 * 164 * On success this wrapper will restore arguments and *jump* to the target, leaving the lr 165 * pointing back to the original caller. 166 */ 167.macro INVOKE_TRAMPOLINE c_name, cxx_name 168 .global \c_name 169 .extern \cxx_name 170 ALIGN_FUNCTION_ENTRY 171\c_name: 172 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME @ save callee saves in case allocation triggers GC 173 ldr r2, [sp, #48] @ pass caller Method* 174 mov r3, r9 @ pass Thread::Current 175 mov r12, sp 176 str r12, [sp, #-16]! @ expand the frame and pass SP 177 bl \cxx_name @ (method_idx, this, caller, Thread*, SP) 178 add sp, #16 @ strip the extra frame 179 mov r12, r1 @ save Method*->code_ 180 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME 181 cmp r0, #0 @ did we find the target? 182 bxne r12 @ tail call to target if so 183 DELIVER_PENDING_EXCEPTION 184.endm 185 186INVOKE_TRAMPOLINE art_invoke_interface_trampoline, artInvokeInterfaceTrampoline 187INVOKE_TRAMPOLINE art_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck 188 189INVOKE_TRAMPOLINE art_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck 190INVOKE_TRAMPOLINE art_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck 191INVOKE_TRAMPOLINE art_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck 192INVOKE_TRAMPOLINE art_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck 193 194 .global art_update_debugger 195 .extern artUpdateDebuggerFromCode 196 /* 197 * On entry, r0 and r1 must be preserved, r2 is dex PC 198 */ 199 ALIGN_FUNCTION_ENTRY 200art_update_debugger: 201 mov r3, r0 @ stash away r0 so that it's saved as if it were an argument 202 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME 203 mov r0, r2 @ arg0 is dex PC 204 mov r1, rSELF @ arg1 is Thread* 205 mov r2, sp @ arg2 is sp 206 bl artUpdateDebuggerFromCode @ artUpdateDebuggerFromCode(int32_t, Thread*, Method**) 207 
RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME 208 mov r0, r3 @ restore original r0 209 bx lr 210 211 .global art_do_long_jump 212 /* 213 * On entry r0 is uint32_t* gprs_ and r1 is uint32_t* fprs_ 214 */ 215 ALIGN_FUNCTION_ENTRY 216art_do_long_jump: 217 vldm r1, {s0-s31} @ load all fprs from argument fprs_ 218 ldr r2, [r0, #60] @ r2 = r15 (PC from gprs_ 60=4*15) 219 add r0, r0, #12 @ increment r0 to skip gprs_[0..2] 12=4*3 220 ldm r0, {r3-r14} @ load remaining gprs from argument gprs_ 221 mov r0, #0 @ clear result registers r0 and r1 222 mov r1, #0 223 bx r2 @ do long jump 224 225 .global art_work_around_app_jni_bugs 226 .extern artWorkAroundAppJniBugs 227 /* 228 * Entry point of native methods when JNI bug compatibility is enabled. 229 */ 230 ALIGN_FUNCTION_ENTRY 231art_work_around_app_jni_bugs: 232 @ save registers that may contain arguments and LR that will be crushed by a call 233 push {r0-r3, lr} 234 sub sp, #12 @ 3 words of space for alignment 235 mov r0, r9 @ pass Thread::Current 236 mov r1, sp @ pass SP 237 bl artWorkAroundAppJniBugs @ (Thread*, SP) 238 add sp, #12 @ rewind stack 239 mov r12, r0 @ save target address 240 pop {r0-r3, lr} @ restore possibly modified argument registers 241 bx r12 @ tail call into JNI routine 242 243 .global art_handle_fill_data_from_code 244 .extern artHandleFillArrayDataFromCode 245 /* 246 * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on 247 * failure. 248 */ 249 ALIGN_FUNCTION_ENTRY 250art_handle_fill_data_from_code: 251 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case exception allocation triggers GC 252 mov r2, r9 @ pass Thread::Current 253 mov r3, sp @ pass SP 254 bl artHandleFillArrayDataFromCode @ (Array*, const DexFile::Payload*, Thread*, SP) 255 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 256 cmp r0, #0 @ success? 
257 bxeq lr @ return on success 258 DELIVER_PENDING_EXCEPTION 259 260 .global art_lock_object_from_code 261 .extern artLockObjectFromCode 262 /* 263 * Entry from managed code that calls artLockObjectFromCode, may block for GC. 264 */ 265 ALIGN_FUNCTION_ENTRY 266art_lock_object_from_code: 267 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case we block 268 mov r1, r9 @ pass Thread::Current 269 mov r2, sp @ pass SP 270 bl artLockObjectFromCode @ (Object* obj, Thread*, SP) 271 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN 272 273 .global art_unlock_object_from_code 274 .extern artUnlockObjectFromCode 275 /* 276 * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure. 277 */ 278 ALIGN_FUNCTION_ENTRY 279art_unlock_object_from_code: 280 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case exception allocation triggers GC 281 mov r1, r9 @ pass Thread::Current 282 mov r2, sp @ pass SP 283 bl artUnlockObjectFromCode @ (Object* obj, Thread*, SP) 284 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 285 cmp r0, #0 @ success? 286 bxeq lr @ return on success 287 DELIVER_PENDING_EXCEPTION 288 289 .global art_check_cast_from_code 290 .extern artCheckCastFromCode 291 /* 292 * Entry from managed code that calls artCheckCastFromCode and delivers exception on failure. 293 */ 294 ALIGN_FUNCTION_ENTRY 295art_check_cast_from_code: 296 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case exception allocation triggers GC 297 mov r2, r9 @ pass Thread::Current 298 mov r3, sp @ pass SP 299 bl artCheckCastFromCode @ (Class* a, Class* b, Thread*, SP) 300 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 301 cmp r0, #0 @ success? 302 bxeq lr @ return on success 303 DELIVER_PENDING_EXCEPTION 304 305 .global art_can_put_array_element_from_code 306 .extern artCanPutArrayElementFromCode 307 /* 308 * Entry from managed code that calls artCanPutArrayElementFromCode and delivers exception on 309 * failure. 
310 */ 311 ALIGN_FUNCTION_ENTRY 312art_can_put_array_element_from_code: 313 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case exception allocation triggers GC 314 mov r2, r9 @ pass Thread::Current 315 mov r3, sp @ pass SP 316 bl artCanPutArrayElementFromCode @ (Object* element, Class* array_class, Thread*, SP) 317 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 318 cmp r0, #0 @ success? 319 bxeq lr @ return on success 320 DELIVER_PENDING_EXCEPTION 321 322 .global art_initialize_static_storage_from_code 323 .extern artInitializeStaticStorageFromCode 324 /* 325 * Entry from managed code when uninitialized static storage, this stub will run the class 326 * initializer and deliver the exception on error. On success the static storage base is 327 * returned. 328 */ 329 ALIGN_FUNCTION_ENTRY 330art_initialize_static_storage_from_code: 331 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 332 mov r2, r9 @ pass Thread::Current 333 mov r3, sp @ pass SP 334 @ artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*, SP) 335 bl artInitializeStaticStorageFromCode 336 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 337 cmp r0, #0 @ success if result is non-null 338 bxne lr @ return on success 339 DELIVER_PENDING_EXCEPTION 340 341 .global art_initialize_type_from_code 342 .extern artInitializeTypeFromCode 343 /* 344 * Entry from managed code when dex cache misses for a type_idx 345 */ 346 ALIGN_FUNCTION_ENTRY 347art_initialize_type_from_code: 348 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 349 mov r2, r9 @ pass Thread::Current 350 mov r3, sp @ pass SP 351 @ artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*, SP) 352 bl artInitializeTypeFromCode 353 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 354 cmp r0, #0 @ success if result is non-null 355 bxne lr @ return on success 356 DELIVER_PENDING_EXCEPTION 357 358 .global art_initialize_type_and_verify_access_from_code 359 .extern artInitializeTypeAndVerifyAccessFromCode 360 /* 
361 * Entry from managed code when type_idx needs to be checked for access and dex cache may also 362 * miss. 363 */ 364 ALIGN_FUNCTION_ENTRY 365art_initialize_type_and_verify_access_from_code: 366 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 367 mov r2, r9 @ pass Thread::Current 368 mov r3, sp @ pass SP 369 @ artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx, Method* referrer, Thread*, SP) 370 bl artInitializeTypeAndVerifyAccessFromCode 371 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 372 cmp r0, #0 @ success if result is non-null 373 bxne lr @ return on success 374 DELIVER_PENDING_EXCEPTION 375 376 .global art_get32_static_from_code 377 .extern artGet32StaticFromCode 378 /* 379 * Called by managed code to resolve a static field and load a 32-bit primitive value. 380 */ 381 ALIGN_FUNCTION_ENTRY 382art_get32_static_from_code: 383 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 384 ldr r1, [sp, #32] @ pass referrer 385 mov r2, r9 @ pass Thread::Current 386 mov r3, sp @ pass SP 387 bl artGet32StaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*, SP) 388 ldr r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ 389 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 390 cmp r12, #0 @ success if no exception is pending 391 bxeq lr @ return on success 392 DELIVER_PENDING_EXCEPTION 393 394 .global art_get64_static_from_code 395 .extern artGet64StaticFromCode 396 /* 397 * Called by managed code to resolve a static field and load a 64-bit primitive value. 
398 */ 399 ALIGN_FUNCTION_ENTRY 400art_get64_static_from_code: 401 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 402 ldr r1, [sp, #32] @ pass referrer 403 mov r2, r9 @ pass Thread::Current 404 mov r3, sp @ pass SP 405 bl artGet64StaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*, SP) 406 ldr r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ 407 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 408 cmp r12, #0 @ success if no exception is pending 409 bxeq lr @ return on success 410 DELIVER_PENDING_EXCEPTION 411 412 .global art_get_obj_static_from_code 413 .extern artGetObjStaticFromCode 414 /* 415 * Called by managed code to resolve a static field and load an object reference. 416 */ 417 ALIGN_FUNCTION_ENTRY 418art_get_obj_static_from_code: 419 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 420 ldr r1, [sp, #32] @ pass referrer 421 mov r2, r9 @ pass Thread::Current 422 mov r3, sp @ pass SP 423 bl artGetObjStaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*, SP) 424 ldr r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ 425 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 426 cmp r12, #0 @ success if no exception is pending 427 bxeq lr @ return on success 428 DELIVER_PENDING_EXCEPTION 429 430 .global art_get32_instance_from_code 431 .extern artGet32InstanceFromCode 432 /* 433 * Called by managed code to resolve an instance field and load a 32-bit primitive value. 434 */ 435 ALIGN_FUNCTION_ENTRY 436art_get32_instance_from_code: 437 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 438 ldr r2, [sp, #32] @ pass referrer 439 mov r3, r9 @ pass Thread::Current 440 mov r12, sp 441 str r12, [sp, #-16]! 
@ expand the frame and pass SP 442 bl artGet32InstanceFromCode @ (field_idx, Object*, referrer, Thread*, SP) 443 add sp, #16 @ strip the extra frame 444 ldr r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ 445 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 446 cmp r12, #0 @ success if no exception is pending 447 bxeq lr @ return on success 448 DELIVER_PENDING_EXCEPTION 449 450 .global art_get64_instance_from_code 451 .extern artGet64InstanceFromCode 452 /* 453 * Called by managed code to resolve an instance field and load a 64-bit primitive value. 454 */ 455 ALIGN_FUNCTION_ENTRY 456art_get64_instance_from_code: 457 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 458 ldr r2, [sp, #32] @ pass referrer 459 mov r3, r9 @ pass Thread::Current 460 mov r12, sp 461 str r12, [sp, #-16]! @ expand the frame and pass SP 462 bl artGet64InstanceFromCode @ (field_idx, Object*, referrer, Thread*, SP) 463 add sp, #16 @ strip the extra frame 464 ldr r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ 465 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 466 cmp r12, #0 @ success if no exception is pending 467 bxeq lr @ return on success 468 DELIVER_PENDING_EXCEPTION 469 470 .global art_get_obj_instance_from_code 471 .extern artGetObjInstanceFromCode 472 /* 473 * Called by managed code to resolve an instance field and load an object reference. 474 */ 475 ALIGN_FUNCTION_ENTRY 476art_get_obj_instance_from_code: 477 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 478 ldr r2, [sp, #32] @ pass referrer 479 mov r3, r9 @ pass Thread::Current 480 mov r12, sp 481 str r12, [sp, #-16]! 
@ expand the frame and pass SP 482 bl artGetObjInstanceFromCode @ (field_idx, Object*, referrer, Thread*, SP) 483 add sp, #16 @ strip the extra frame 484 ldr r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ 485 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 486 cmp r12, #0 @ success if no exception is pending 487 bxeq lr @ return on success 488 DELIVER_PENDING_EXCEPTION 489 490 .global art_set32_static_from_code 491 .extern artSet32StaticFromCode 492 /* 493 * Called by managed code to resolve a static field and store a 32-bit primitive value. 494 */ 495 ALIGN_FUNCTION_ENTRY 496art_set32_static_from_code: 497 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 498 ldr r2, [sp, #32] @ pass referrer 499 mov r3, r9 @ pass Thread::Current 500 mov r12, sp 501 str r12, [sp, #-16]! @ expand the frame and pass SP 502 bl artSet32StaticFromCode @ (field_idx, new_val, referrer, Thread*, SP) 503 add sp, #16 @ strip the extra frame 504 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 505 cmp r0, #0 @ success if result is 0 506 bxeq lr @ return on success 507 DELIVER_PENDING_EXCEPTION 508 509 .global art_set64_static_from_code 510 .extern artSet64StaticFromCode 511 /* 512 * Called by managed code to resolve a static field and store a 64-bit primitive value. 
513 * On entry r0 holds field index, r1:r2 hold new_val 514 */ 515 ALIGN_FUNCTION_ENTRY 516art_set64_static_from_code: 517 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 518 mov r3, r2 @ pass one half of wide argument 519 mov r2, r1 @ pass other half of wide argument 520 ldr r1, [sp, #32] @ pass referrer 521 mov r12, sp @ save SP 522 sub sp, #8 @ grow frame for alignment with stack args 523 push {r9, r12} @ pass Thread::Current and SP 524 bl artSet64StaticFromCode @ (field_idx, referrer, new_val, Thread*, SP) 525 add sp, #16 @ release out args 526 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here 527 cmp r0, #0 @ success if result is 0 528 bxeq lr @ return on success 529 DELIVER_PENDING_EXCEPTION 530 531 .global art_set_obj_static_from_code 532 .extern artSetObjStaticFromCode 533 /* 534 * Called by managed code to resolve a static field and store an object reference. 535 */ 536 ALIGN_FUNCTION_ENTRY 537art_set_obj_static_from_code: 538 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 539 ldr r2, [sp, #32] @ pass referrer 540 mov r3, r9 @ pass Thread::Current 541 mov r12, sp 542 str r12, [sp, #-16]! @ expand the frame and pass SP 543 bl artSetObjStaticFromCode @ (field_idx, new_val, referrer, Thread*, SP) 544 add sp, #16 @ strip the extra frame 545 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 546 cmp r0, #0 @ success if result is 0 547 bxeq lr @ return on success 548 DELIVER_PENDING_EXCEPTION 549 550 .global art_set32_instance_from_code 551 .extern artSet32InstanceFromCode 552 /* 553 * Called by managed code to resolve an instance field and store a 32-bit primitive value. 
554 */ 555 ALIGN_FUNCTION_ENTRY 556art_set32_instance_from_code: 557 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 558 ldr r3, [sp, #32] @ pass referrer 559 mov r12, sp @ save SP 560 sub sp, #8 @ grow frame for alignment with stack args 561 push {r9, r12} @ pass Thread::Current and SP 562 bl artSet32InstanceFromCode @ (field_idx, Object*, new_val, referrer, Thread*, SP) 563 add sp, #16 @ release out args 564 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here 565 cmp r0, #0 @ success if result is 0 566 bxeq lr @ return on success 567 DELIVER_PENDING_EXCEPTION 568 569 .global art_set64_instance_from_code 570 .extern artSet32InstanceFromCode 571 /* 572 * Called by managed code to resolve an instance field and store a 64-bit primitive value. 573 */ 574 ALIGN_FUNCTION_ENTRY 575art_set64_instance_from_code: 576 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 577 mov r12, sp @ save SP 578 sub sp, #8 @ grow frame for alignment with stack args 579 push {r9, r12} @ pass Thread::Current and SP 580 bl artSet64InstanceFromCode @ (field_idx, Object*, new_val, Thread*, SP) 581 add sp, #16 @ release out args 582 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here 583 cmp r0, #0 @ success if result is 0 584 bxeq lr @ return on success 585 DELIVER_PENDING_EXCEPTION 586 587 .global art_set_obj_instance_from_code 588 .extern artSetObjInstanceFromCode 589 /* 590 * Called by managed code to resolve an instance field and store an object reference. 
591 */ 592 ALIGN_FUNCTION_ENTRY 593art_set_obj_instance_from_code: 594 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 595 ldr r3, [sp, #32] @ pass referrer 596 mov r12, sp @ save SP 597 sub sp, #8 @ grow frame for alignment with stack args 598 push {r9, r12} @ pass Thread::Current and SP 599 bl artSetObjInstanceFromCode @ (field_idx, Object*, new_val, referrer, Thread*, SP) 600 add sp, #16 @ release out args 601 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here 602 cmp r0, #0 @ success if result is 0 603 bxeq lr @ return on success 604 DELIVER_PENDING_EXCEPTION 605 606 .global art_resolve_string_from_code 607 .extern artResolveStringFromCode 608 /* 609 * Entry from managed code to resolve a string, this stub will allocate a String and deliver an 610 * exception on error. On success the String is returned. R0 holds the referring method, 611 * R1 holds the string index. The fast path check for hit in strings cache has already been 612 * performed. 
613 */ 614 ALIGN_FUNCTION_ENTRY 615art_resolve_string_from_code: 616 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 617 mov r2, r9 @ pass Thread::Current 618 mov r3, sp @ pass SP 619 @ artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*, SP) 620 bl artResolveStringFromCode 621 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 622 cmp r0, #0 @ success if result is non-null 623 bxne lr @ return on success 624 DELIVER_PENDING_EXCEPTION 625 626 .global art_alloc_object_from_code 627 .extern artAllocObjectFromCode 628 /* 629 * Called by managed code to allocate an object 630 */ 631 ALIGN_FUNCTION_ENTRY 632art_alloc_object_from_code: 633 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 634 mov r2, r9 @ pass Thread::Current 635 mov r3, sp @ pass SP 636 bl artAllocObjectFromCode @ (uint32_t type_idx, Method* method, Thread*, SP) 637 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 638 cmp r0, #0 @ success if result is non-null 639 bxne lr @ return on success 640 DELIVER_PENDING_EXCEPTION 641 642 .global art_alloc_object_from_code_with_access_check 643 .extern artAllocObjectFromCodeWithAccessCheck 644 /* 645 * Called by managed code to allocate an object when the caller doesn't know whether it has 646 * access to the created type. 647 */ 648 ALIGN_FUNCTION_ENTRY 649art_alloc_object_from_code_with_access_check: 650 SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC 651 mov r2, r9 @ pass Thread::Current 652 mov r3, sp @ pass SP 653 bl artAllocObjectFromCodeWithAccessCheck @ (uint32_t type_idx, Method* method, Thread*, SP) 654 RESTORE_REF_ONLY_CALLEE_SAVE_FRAME 655 cmp r0, #0 @ success if result is non-null 656 bxne lr @ return on success 657 DELIVER_PENDING_EXCEPTION 658 659 .global art_alloc_array_from_code 660 .extern artAllocArrayFromCode 661 /* 662 * Called by managed code to allocate an array. 
     * On entry (AAPCS): r0 = type_idx, r1 = Method* method, r2 = component_count.
     * The 5th C argument (SP) is passed on the stack; the frame is grown by 16
     * bytes (keeping 8-byte stack alignment) to hold it.
     * Returns the new array in r0, or delivers the pending exception on failure.
     */
    ALIGN_FUNCTION_ENTRY
art_alloc_array_from_code:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        @ save callee saves in case of GC
    mov     r3, r9                          @ pass Thread::Current
    mov     r12, sp                         @ r12 = SP before the extra frame
    str     r12, [sp, #-16]!                @ expand the frame and pass SP as the 5th (stack) arg
    @ artAllocArrayFromCode(uint32_t type_idx, Method* method, int32_t component_count, Thread*, SP)
    bl      artAllocArrayFromCode
    add     sp, #16                         @ strip the extra frame
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cmp     r0, #0                          @ success if result is non-null
    bxne    lr                              @ return on success
    DELIVER_PENDING_EXCEPTION               @ does not return

    .global art_alloc_array_from_code_with_access_check
    .extern artAllocArrayFromCodeWithAccessCheck
    /*
     * Called by managed code to allocate an array when the caller doesn't know whether it has
     * access to the created type.  Same contract as art_alloc_array_from_code, plus the
     * access check in the runtime helper.
     */
    ALIGN_FUNCTION_ENTRY
art_alloc_array_from_code_with_access_check:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        @ save callee saves in case of GC
    mov     r3, r9                          @ pass Thread::Current
    mov     r12, sp                         @ r12 = SP before the extra frame
    str     r12, [sp, #-16]!                @ expand the frame and pass SP as the 5th (stack) arg
    @ artAllocArrayFromCodeWithAccessCheck(type_idx, method, component_count, Thread*, SP)
    bl      artAllocArrayFromCodeWithAccessCheck
    add     sp, #16                         @ strip the extra frame
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cmp     r0, #0                          @ success if result is non-null
    bxne    lr                              @ return on success
    DELIVER_PENDING_EXCEPTION               @ does not return

    .global art_check_and_alloc_array_from_code
    .extern artCheckAndAllocArrayFromCode
    /*
     * Called by managed code to allocate an array in a special case for FILLED_NEW_ARRAY.
     * On entry (AAPCS): r0 = type_idx, r1 = Method* method, r2 = count.
     */
    ALIGN_FUNCTION_ENTRY
art_check_and_alloc_array_from_code:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        @ save callee saves in case of GC
    mov     r3, r9                          @ pass Thread::Current
    mov     r12, sp                         @ r12 = SP before the extra frame
    str     r12, [sp, #-16]!                @ expand the frame and pass SP as the 5th (stack) arg
    @ artCheckAndAllocArrayFromCode(uint32_t type_idx, Method* method, int32_t count, Thread* , SP)
    bl      artCheckAndAllocArrayFromCode
    add     sp, #16                         @ strip the extra frame
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cmp     r0, #0                          @ success if result is non-null
    bxne    lr                              @ return on success
    DELIVER_PENDING_EXCEPTION               @ does not return

    .global art_check_and_alloc_array_from_code_with_access_check
    .extern artCheckAndAllocArrayFromCodeWithAccessCheck
    /*
     * Called by managed code to allocate an array in a special case for FILLED_NEW_ARRAY,
     * additionally performing an access check on the created type.
     */
    ALIGN_FUNCTION_ENTRY
art_check_and_alloc_array_from_code_with_access_check:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        @ save callee saves in case of GC
    mov     r3, r9                          @ pass Thread::Current
    mov     r12, sp                         @ r12 = SP before the extra frame
    str     r12, [sp, #-16]!                @ expand the frame and pass SP as the 5th (stack) arg
    @ artCheckAndAllocArrayFromCodeWithAccessCheck(type_idx, method, count, Thread* , SP)
    bl      artCheckAndAllocArrayFromCodeWithAccessCheck
    add     sp, #16                         @ strip the extra frame
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cmp     r0, #0                          @ success if result is non-null
    bxne    lr                              @ return on success
    DELIVER_PENDING_EXCEPTION               @ does not return

    .global art_test_suspend
    .extern artTestSuspendFromCode
    /*
     * Called by managed code when the value in rSUSPEND has been decremented to 0.
     * Fast path: if the thread's flags halfword is zero, just reset the suspend
     * counter and return.  Slow path: spill callee saves (so the suspend check
     * can crawl this stack) and call into the runtime.
     */
    ALIGN_FUNCTION_ENTRY
art_test_suspend:
    ldrh    r0, [rSELF, #THREAD_FLAGS_OFFSET]
    mov     rSUSPEND, #SUSPEND_CHECK_INTERVAL @ reset rSUSPEND to SUSPEND_CHECK_INTERVAL
    cmp     r0, #0                          @ check Thread::Current()->suspend_count_ == 0
    bxeq    lr                              @ return if suspend_count_ == 0
    mov     r0, rSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        @ save callee saves for stack crawl
    mov     r1, sp
    bl      artTestSuspendFromCode          @ (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN

    .global art_proxy_invoke_handler
    .extern artProxyInvokeHandler
    /*
     * Called by managed code that is attempting to call a method on a proxy class.
     * On entry
     * r0 holds the proxy method and r1 holds the receiver; r2 and r3 may contain arguments.  The
     * frame size of the invoked proxy method agrees with a ref and args callee save frame.
     */
    ALIGN_FUNCTION_ENTRY
art_proxy_invoke_handler:
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str     r0, [sp, #0]                    @ place proxy method at bottom of frame
    mov     r2, r9                          @ pass Thread::Current
    mov     r3, sp                          @ pass SP
    blx     artProxyInvokeHandler           @ (Method* proxy method, receiver, Thread*, SP)
    ldr     r12, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_
    @ Manual unwind of the ref-and-args frame: lr lives at offset 44 in a 48-byte frame.
    ldr     lr, [sp, #44]                   @ restore lr
    add     sp, #48                         @ pop frame
    cmp     r12, #0                         @ success if no exception is pending
    bxeq    lr                              @ return on success
    DELIVER_PENDING_EXCEPTION               @ does not return

    .global art_instrumentation_entry_from_code
    .global art_instrumentation_exit_from_code
    .extern artInstrumentationMethodEntryFromCode
    .extern artInstrumentationMethodExitFromCode
    /*
     * Routine that intercepts method calls and returns.
     */
    ALIGN_FUNCTION_ENTRY
art_instrumentation_entry_from_code:
    @ Queries the runtime for the real code to invoke, then tail-dispatches to it
    @ with the original argument registers restored.
    mov     r12, sp                         @ remember bottom of caller's frame
    push    {r0-r3}                         @ save arguments (4 words)
    mov     r1, r9                          @ pass Thread::Current
    mov     r2, r12                         @ pass SP
    mov     r3, lr                          @ pass LR
    blx     artInstrumentationMethodEntryFromCode  @ (Method*, Thread*, SP, LR)
    mov     r12, r0                         @ r12 holds reference to code
    pop     {r0-r3}                         @ restore arguments
    blx     r12                             @ call method; returns into ..._exit below
art_instrumentation_exit_from_code:
    @ Reached when the instrumented method returns; r0/r1 hold the (possibly
    @ 64-bit) return value and must be preserved across the runtime call.
    mov     r12, sp                         @ remember bottom of caller's frame
    push    {r0-r1}                         @ save return value
    sub     sp, #8                          @ align stack
    mov     r0, r9                          @ pass Thread::Current
    mov     r1, r12                         @ pass SP
    blx     artInstrumentationMethodExitFromCode @ (Thread*, SP)
    add     sp, #8
    mov     r2, r0                          @ link register saved by instrumentation
    mov     lr, r1                          @ r1 is holding link register if we're to bounce to deoptimize
    pop     {r0, r1}                        @ restore return value
    bx      r2                              @ return

    .global art_deoptimize
    .extern artDeoptimize
    .extern artEnterInterpreterFromDeoptimize
    /*
     * The thread's enter interpreter flag is set and so we should transition to the interpreter
     * rather than allow execution to continue in the frame below.  There may be live results in
     * registers depending on how complete the operation is when we safepoint - for example, a
     * set operation may have completed while a get operation needs writing back into the vregs.
     */
    ALIGN_FUNCTION_ENTRY
art_deoptimize:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    mov     r2, r9                          @ Set up args.
    mov     r3, sp
    blx     artDeoptimize                   @ artDeoptimize(return value, Thread*, SP)
                                            @ Returns caller method's frame size in r0.
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cmp     r0, #0                          @ Was the caller an upcall (frame size 0)?
    bxeq    lr                              @ Return if caller was upcall.
    add     r12, sp, r0                     @ r12 == bottom of caller's frame.
    ldr     lr, [r12, #-4]                  @ Restore lr from the word below the caller's frame.
    mov     sp, r12                         @ Remove frame.
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    blx     artEnterInterpreterFromDeoptimize @ Enter interpreter, callee-save ends stack fragment.
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    bx      lr                              @ Return to caller.

    .global art_mul_long
    /*
     * Signed 64-bit integer multiply.
     *
     * Consider WXxYZ (r1r0 x r3r2) with a long multiply:
     *        WX
     *      x YZ
     *  --------
     *     ZW ZX
     *  YW YX
     *
     * The low word of the result holds ZX, the high word holds
     * (ZW+YX) + (the high overflow from ZX).  YW doesn't matter because
     * it doesn't fit in the low 64 bits.
     *
     * Unlike most ARM math operations, multiply instructions have
     * restrictions on using the same register more than once (Rd and Rm
     * cannot be the same).
     */
    /* mul-long vAA, vBB, vCC */
    ALIGN_FUNCTION_ENTRY
art_mul_long:
    push    {r9 - r10}                      @ r9/r10 used as scratch; callee-saved here
    mul     ip, r2, r1                      @ ip<- ZxW
    umull   r9, r10, r2, r0                 @ r9/r10 <- ZxX (full 64-bit low product)
    mla     r2, r0, r3, ip                  @ r2<- YxX + (ZxW)
    add     r10, r2, r10                    @ r10<- r10 + low(ZxW + (YxX))  (high word)
    mov     r0,r9                           @ result low word
    mov     r1,r10                          @ result high word
    pop     {r9 - r10}
    bx      lr

    .global art_shl_long
    /*
     * Long integer shift.  This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* shl-long vAA, vBB, vCC */
    ALIGN_FUNCTION_ENTRY
art_shl_long:
    and     r2, r2, #63                     @ r2<- r2 & 0x3f
    mov     r1, r1, asl r2                  @ r1<- r1 << r2
    rsb     r3, r2, #32                     @ r3<- 32 - r2
    orr     r1, r1, r0, lsr r3              @ r1<- r1 | (r0 >> (32-r2))  (bits carried up from low word)
    subs    ip, r2, #32                     @ ip<- r2 - 32
    movpl   r1, r0, asl ip                  @ if r2 >= 32, r1<- r0 << (r2-32)
    mov     r0, r0, asl r2                  @ r0<- r0 << r2
    bx      lr

    .global art_shr_long
    /*
     * Long integer shift (arithmetic/signed).
     * This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* shr-long vAA, vBB, vCC */
    ALIGN_FUNCTION_ENTRY
art_shr_long:
    and     r2, r2, #63                     @ r2<- r2 & 0x3f
    mov     r0, r0, lsr r2                  @ r0<- r0 >> r2
    rsb     r3, r2, #32                     @ r3<- 32 - r2
    orr     r0, r0, r1, asl r3              @ r0<- r0 | (r1 << (32-r2))  (bits carried down from high word)
    subs    ip, r2, #32                     @ ip<- r2 - 32
    movpl   r0, r1, asr ip                  @ if r2 >= 32, r0<- r1 >> (r2-32)
    mov     r1, r1, asr r2                  @ r1<- r1 >> r2  (arithmetic: sign-fills high word)
    bx      lr

    .global art_ushr_long
    /*
     * Long integer shift (logical/unsigned).  This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* ushr-long vAA, vBB, vCC */
    ALIGN_FUNCTION_ENTRY
art_ushr_long:
    and     r2, r2, #63                     @ r2<- r2 & 0x3f
    mov     r0, r0, lsr r2                  @ r0<- r0 >>> r2
    rsb     r3, r2, #32                     @ r3<- 32 - r2
    orr     r0, r0, r1, asl r3              @ r0<- r0 | (r1 << (32-r2))  (bits carried down from high word)
    subs    ip, r2, #32                     @ ip<- r2 - 32
    movpl   r0, r1, lsr ip                  @ if r2 >= 32, r0<- r1 >>> (r2-32)
    mov     r1, r1, lsr r2                  @ r1<- r1 >>> r2  (logical: zero-fills high word)
    bx      lr

    .balign 4
    .global art_indexof
art_indexof:
    /*
     * String's indexOf.
     *
     * On entry:
     *    r0:   string object (known non-null)
     *    r1:   char to match (known <= 0xFFFF)
     *    r2:   Starting offset in string data
     * Returns the index of the first match in r0, or -1 if not found.
     */

    push    {r4, r10-r11, lr}               @ 4 words of callee saves
    ldr     r3, [r0, #STRING_COUNT_OFFSET]  @ r3<- string length (chars)
    ldr     r12, [r0, #STRING_OFFSET_OFFSET] @ r12<- offset into backing char array
    ldr     r0, [r0, #STRING_VALUE_OFFSET]  @ r0<- backing char array object

    /* Clamp start to [0..count] */
    cmp     r2, #0
    movlt   r2, #0
    cmp     r2, r3
    movgt   r2, r3

    /* Build a pointer to the start of string data */
    add     r0, #STRING_DATA_OFFSET
    add     r0, r0, r12, lsl #1             @ chars are 2 bytes wide

    /* Save a copy in r12 to later compute result */
    mov     r12, r0

    /* Build pointer to start of data to compare and pre-bias */
    add     r0, r0, r2, lsl #1
    sub     r0, #2                          @ pre-bias for ldrh pre-index writeback below

    /* Compute iteration count */
    sub     r2, r3, r2

    /*
     * At this point we have:
     *   r0: start of data to test
     *   r1: char to compare
     *   r2: iteration count
     *   r12: original start of string data
     *   r3, r4, r10, r11 available for loading string data
     */

    subs    r2, #4
    blt     indexof_remainder               @ fewer than 4 chars left: scalar loop

indexof_loop4:
    @ Unrolled x4: each ldrh advances r0 by one char via pre-index writeback.
    ldrh    r3, [r0, #2]!
    ldrh    r4, [r0, #2]!
    ldrh    r10, [r0, #2]!
    ldrh    r11, [r0, #2]!
    cmp     r3, r1
    beq     match_0
    cmp     r4, r1
    beq     match_1
    cmp     r10, r1
    beq     match_2
    cmp     r11, r1
    beq     match_3
    subs    r2, #4
    bge     indexof_loop4

indexof_remainder:
    adds    r2, #4                          @ restore remaining count (0..3)
    beq     indexof_nomatch

indexof_loop1:
    ldrh    r3, [r0, #2]!
    cmp     r3, r1
    beq     match_3                         @ r0 points at the match: no bias needed
    subs    r2, #1
    bne     indexof_loop1

indexof_nomatch:
    mov     r0, #-1
    pop     {r4, r10-r11, pc}

    @ match_N: r0 points at the 4th char of the unrolled group; back up
    @ (3-N) chars, then convert byte distance from data start into an index.
match_0:
    sub     r0, #6
    sub     r0, r12
    asr     r0, r0, #1                      @ bytes -> chars
    pop     {r4, r10-r11, pc}
match_1:
    sub     r0, #4
    sub     r0, r12
    asr     r0, r0, #1
    pop     {r4, r10-r11, pc}
match_2:
    sub     r0, #2
    sub     r0, r12
    asr     r0, r0, #1
    pop     {r4, r10-r11, pc}
match_3:
    sub     r0, r12
    asr     r0, r0, #1
    pop     {r4, r10-r11, pc}


    /*
     * String's compareTo.
     *
     * Requires rARG0/rARG1 to have been previously checked for null.  Will
     * return negative if this's string is < comp, 0 if they are the
     * same and positive if >.
     *
     * On entry:
     *    r0:   this object pointer
     *    r1:   comp object pointer
     *
     */

    .balign 4
    .global art_string_compareto
    .extern __memcmp16
art_string_compareto:
    mov    r2, r0                           @ this to r2, opening up r0 for return value
    subs   r0, r2, r1                       @ Same object?
    bxeq   lr                               @ identical references compare equal

    push   {r4, r7-r12, lr}                 @ 8 words - keep alignment

    ldr    r4, [r2, #STRING_OFFSET_OFFSET]
    ldr    r9, [r1, #STRING_OFFSET_OFFSET]
    ldr    r7, [r2, #STRING_COUNT_OFFSET]
    ldr    r10, [r1, #STRING_COUNT_OFFSET]
    ldr    r2, [r2, #STRING_VALUE_OFFSET]
    ldr    r1, [r1, #STRING_VALUE_OFFSET]

    /*
     * At this point, we have:
     *    value:  r2/r1
     *    offset: r4/r9
     *    count:  r7/r10
     * We're going to compute
     *    r11 <- countDiff
     *    r10 <- minCount
     */
    subs   r11, r7, r10                     @ countDiff = this.count - comp.count; sets flags
    movls  r10, r7                          @ minCount = min(counts) (ls: this.count <= comp.count)

    /* Now, build pointers to the string data */
    add    r2, r2, r4, lsl #1
    add    r1, r1, r9, lsl #1
    /*
     * Note: data pointers point to previous element so we can use pre-index
     * mode with base writeback.
     */
    add    r2, #STRING_DATA_OFFSET-2        @ offset to contents[-1]
    add    r1, #STRING_DATA_OFFSET-2        @ offset to contents[-1]

    /*
     * At this point we have:
     *    r2: *this string data
     *    r1: *comp string data
     *    r10: iteration count for comparison
     *    r11: value to return if the first part of the string is equal
     *    r0: reserved for result
     *    r3, r4, r7, r8, r9, r12 available for loading string data
     */

    subs   r10, #2
    blt    do_remainder2                    @ minCount < 2: compare 0 or 1 chars

    /*
     * Unroll the first two checks so we can quickly catch early mismatch
     * on long strings (but preserve incoming alignment)
     */

    ldrh   r3, [r2, #2]!
    ldrh   r4, [r1, #2]!
    ldrh   r7, [r2, #2]!
    ldrh   r8, [r1, #2]!
    subs   r0, r3, r4                       @ r0 = char diff; doubles as the result
    subeqs r0, r7, r8                       @ only compare 2nd pair if 1st was equal
    bne    done
    cmp    r10, #28
    bgt    do_memcmp16                      @ long strings: hand off to __memcmp16
    subs   r10, #3
    blt    do_remainder

loopback_triple:
    @ Compare 3 chars per iteration; flags chain through the subeqs pair.
    ldrh   r3, [r2, #2]!
    ldrh   r4, [r1, #2]!
    ldrh   r7, [r2, #2]!
    ldrh   r8, [r1, #2]!
    ldrh   r9, [r2, #2]!
    ldrh   r12,[r1, #2]!
    subs   r0, r3, r4
    subeqs r0, r7, r8
    subeqs r0, r9, r12
    bne    done
    subs   r10, #3
    bge    loopback_triple

do_remainder:
    adds   r10, #3                          @ restore remaining count (0..2)
    beq    returnDiff

loopback_single:
    ldrh   r3, [r2, #2]!
    ldrh   r4, [r1, #2]!
    subs   r0, r3, r4
    bne    done
    subs   r10, #1
    bne    loopback_single

returnDiff:
    @ All compared chars equal: result is the length difference.
    mov    r0, r11
    pop    {r4, r7-r12, pc}

do_remainder2:
    adds   r10, #2                          @ restore minCount (0 or 1)
    bne    loopback_single
    mov    r0, r11
    pop    {r4, r7-r12, pc}

    /* Long string case */
do_memcmp16:
    mov    r7, r11                          @ stash countDiff across the call
    add    r0, r2, #2                       @ undo pre-bias: first uncompared char
    add    r1, r1, #2
    mov    r2, r10                          @ remaining char count
    bl     __memcmp16
    cmp    r0, #0                           @ equal prefixes: fall back to countDiff
    moveq  r0, r7
done:
    pop    {r4, r7-r12, pc}