/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#if defined(WITH_JIT)

/*
 * This is a #include, not a %include, because we want the C pre-processor
 * to expand the macros into assembler assignment statements.
 */
#include "../../../mterp/common/asm-constants.h"
#include "../../../mterp/common/mips-defines.h"
#include "../../../mterp/common/jit-config.h"
#include <asm/regdef.h>
#include <asm/fpregdef.h>

/* Select FP-argument convention from the compiler's float ABI. */
#ifdef __mips_hard_float
#define HARD_FLOAT
#else
#define SOFT_FLOAT
#endif

/* MIPS definitions and declarations

   reg  nick     purpose
   s0   rPC      interpreted program counter, used for fetching instructions
   s1   rFP      interpreted frame pointer, used for accessing locals and args
   s2   rSELF    pointer to thread
   s3   rIBASE   interpreted instruction base pointer, used for computed goto
   s4   rINST    first 16-bit code unit of current instruction
*/

/* register offsets: encoding numbers 0-31 = GPRs, 32-63 = FPRs */
#define r_ZERO 0
#define r_AT   1
#define r_V0   2
#define r_V1   3
#define r_A0   4
#define r_A1   5
#define r_A2   6
#define r_A3   7
#define r_T0   8
#define r_T1   9
#define r_T2   10
#define r_T3   11
#define r_T4   12
#define r_T5   13
#define r_T6   14
#define r_T7   15
#define r_S0   16
#define r_S1   17
#define r_S2   18
#define r_S3   19
#define r_S4   20
#define r_S5   21
#define r_S6   22
#define r_S7   23
#define r_T8   24
#define r_T9   25
#define r_K0   26
#define r_K1   27
#define r_GP   28
#define r_SP   29
#define r_FP   30
#define r_RA   31
#define r_F0   32
#define r_F1   33
#define r_F2   34
#define r_F3   35
#define r_F4   36
#define r_F5   37
#define r_F6   38
#define r_F7   39
#define r_F8   40
#define r_F9   41
#define r_F10  42
#define r_F11  43
#define r_F12  44
#define r_F13  45
#define r_F14  46
#define r_F15  47
#define r_F16  48
#define r_F17  49
#define r_F18  50
#define r_F19  51
#define r_F20  52
#define r_F21  53
#define r_F22  54
#define r_F23  55
#define r_F24  56
#define r_F25  57
#define r_F26  58
#define r_F27  59
#define r_F28  60
#define r_F29  61
#define r_F30  62
#define r_F31  63

/* single-purpose registers, given names for clarity */
#define rPC    s0
#define rFP    s1
#define rSELF  s2
#define rIBASE s3
#define rINST  s4
#define rOBJ   s5
#define rBIX   s6
#define rTEMP  s7

/* The long arguments sent to function calls in Big-endian mode should be register
swapped when sent to functions in little endian mode. In other words long variable
sent as a0(MSW), a1(LSW) for a function call in LE mode should be sent as a1, a0 in
Big Endian mode */

#ifdef HAVE_LITTLE_ENDIAN
#define rARG0    a0
#define rARG1    a1
#define rARG2    a2
#define rARG3    a3
#define rRESULT0 v0
#define rRESULT1 v1
#else
#define rARG0    a1
#define rARG1    a0
#define rARG2    a3
#define rARG3    a2
#define rRESULT0 v1
#define rRESULT1 v0
#endif


/* save/restore the PC and/or FP from the thread struct */
#define LOAD_PC_FROM_SELF() lw rPC, offThread_pc(rSELF)
#define SAVE_PC_TO_SELF()   sw rPC, offThread_pc(rSELF)
#define LOAD_FP_FROM_SELF() lw rFP, offThread_curFrame(rSELF)
#define SAVE_FP_TO_SELF()   sw rFP, offThread_curFrame(rSELF)

/* Publish rPC into the frame's StackSaveArea (sits just below rFP). */
#define EXPORT_PC() \
    sw rPC, (offStackSaveArea_currentPc - sizeofStackSaveArea)(rFP)

/* rd = pointer to the StackSaveArea of the frame at _fpreg */
#define SAVEAREA_FROM_FP(rd, _fpreg) \
    subu rd, _fpreg, sizeofStackSaveArea

/* Fetch the 16-bit code unit at rPC into rINST. */
#define FETCH_INST() lhu rINST, (rPC)

/* Fetch the code unit _count insns ahead, then advance rPC past it. */
#define FETCH_ADVANCE_INST(_count) lhu rINST, (_count*2)(rPC); \
    addu rPC, rPC, (_count * 2)

/* Advance rPC by a byte offset already in rd, then fetch. */
#define FETCH_ADVANCE_INST_RB(rd) addu rPC, rPC, rd; \
    lhu rINST, (rPC)

/* Fetch a 16-bit code unit at rPC + _count*2 (unsigned / sign-extended). */
#define FETCH(rd, _count)   lhu rd, (_count * 2)(rPC)
#define FETCH_S(rd, _count) lh rd, (_count * 2)(rPC)

/* Fetch one byte of a code unit; B = low byte, C = high byte of the
 * little-endian code unit, so the offsets swap with host endianness. */
#ifdef HAVE_LITTLE_ENDIAN

#define FETCH_B(rd, _count) lbu rd, (_count * 2)(rPC)
#define FETCH_C(rd, _count) lbu rd, (_count * 2 + 1)(rPC)

#else

#define FETCH_B(rd, _count) lbu rd, (_count * 2 + 1)(rPC)
#define FETCH_C(rd, _count) lbu rd, (_count * 2)(rPC)

#endif

/* rd = opcode (low 8 bits of rINST) */
#define GET_INST_OPCODE(rd) and rd, rINST, 0xFF

/* Computed goto: jump to rIBASE + rd * handler size. */
#define GOTO_OPCODE(rd) sll rd, rd, ${handler_size_bits}; \
    addu rd, rIBASE, rd; \
    jr rd


#define LOAD(rd, rbase)    lw rd, 0(rbase)
#define LOAD_F(rd, rbase)  l.s rd, (rbase)
#define STORE(rd, rbase)   sw rd, 0(rbase)
#define STORE_F(rd, rbase) s.s rd, (rbase)

/* rd = vreg[rix] (frame-relative word load) */
#define GET_VREG(rd, rix) LOAD_eas2(rd,rFP,rix)
/* rd(FP reg) = vreg[rix]; clobbers AT for the address computation */
#define GET_VREG_F(rd, rix) EAS2(AT, rFP, rix); \
    .set noat; l.s rd, (AT); .set at

/* vreg[rix] = rd */
#define SET_VREG(rd, rix) STORE_eas2(rd, rFP, rix)

/* Store rd into vreg[rix] and dispatch to the handler for opcode dst.
 * The vreg store sits in the jr delay slot, hence noreorder. Clobbers t8. */
#define SET_VREG_GOTO(rd, rix, dst) .set noreorder; \
    sll dst, dst, ${handler_size_bits}; \
    addu dst, rIBASE, dst; \
    sll t8, rix, 2; \
    addu t8, t8, rFP; \
    jr dst; \
    sw rd, 0(t8); \
    .set reorder

/* vreg[rix] = rd (FP reg); clobbers AT */
#define SET_VREG_F(rd, rix) EAS2(AT, rFP, rix); \
    .set noat; s.s rd, (AT); .set at


/* Extract instruction fields: A = bits 11:8, B = bits 15:12 of rINST.
 * GET_OPA leaves the B nibble in the upper bits; GET_OPA4 masks it off. */
#define GET_OPA(rd) srl rd, rINST, 8
#ifndef MIPS32R2
#define GET_OPA4(rd) GET_OPA(rd); and rd, 0xf
#else
/* BUGFIX: extract from rINST, not from the uninitialized destination rd
 * (must match the non-R2 variant, which reads rINST). */
#define GET_OPA4(rd) ext rd, rINST, 8, 4
#endif
#define GET_OPB(rd) srl rd, rINST, 12

/* rd = rSELF->off (token-pasted Thread field offset) */
#define LOAD_rSELF_OFF(rd,off) lw rd, offThread_##off##(rSELF)

#define LOAD_rSELF_method(rd)           LOAD_rSELF_OFF(rd, method)
#define LOAD_rSELF_methodClassDex(rd)   LOAD_rSELF_OFF(rd, methodClassDex)
#define LOAD_rSELF_interpStackEnd(rd)   LOAD_rSELF_OFF(rd, interpStackEnd)
#define LOAD_rSELF_retval(rd)           LOAD_rSELF_OFF(rd, retval)
#define LOAD_rSELF_pActiveProfilers(rd) LOAD_rSELF_OFF(rd, pActiveProfilers)
#define LOAD_rSELF_bailPtr(rd)          LOAD_rSELF_OFF(rd, bailPtr)

#define GET_JIT_PROF_TABLE(rd) LOAD_rSELF_OFF(rd,pJitProfTable)
#define GET_JIT_THRESHOLD(rd)  LOAD_rSELF_OFF(rd,jitThreshold)

/*
 * Form an Effective Address rd = rbase + roff<<n;
 * Uses reg AT
 */
#define EASN(rd,rbase,roff,rshift) .set noat; \
    sll AT, roff, rshift; \
    addu rd, rbase, AT; \
    .set at

#define EAS1(rd,rbase,roff) EASN(rd,rbase,roff,1)
#define EAS2(rd,rbase,roff) EASN(rd,rbase,roff,2)
#define EAS3(rd,rbase,roff) EASN(rd,rbase,roff,3)
#define EAS4(rd,rbase,roff) EASN(rd,rbase,roff,4)

/*
 * Form an Effective Shift Right rd = rbase + roff>>n;
 * Uses reg AT
 */
#define ESRN(rd,rbase,roff,rshift) .set noat; \
    srl AT, roff, rshift; \
    addu rd, rbase, AT; \
    .set at

/* rd = *(rbase + roff*4); clobbers AT */
#define LOAD_eas2(rd,rbase,roff) EAS2(AT, rbase, roff); \
    .set noat; lw rd, 0(AT); .set at

/* *(rbase + roff*4) = rd; clobbers AT */
#define STORE_eas2(rd,rbase,roff) EAS2(AT, rbase, roff); \
    .set noat; sw rd, 0(AT); .set at

#define LOAD_RB_OFF(rd,rbase,off)   lw rd, off(rbase)
#define LOADu2_RB_OFF(rd,rbase,off) lhu rd, off(rbase)
#define STORE_RB_OFF(rd,rbase,off)  sw rd, off(rbase)

/* 64-bit values live in memory as two words; rlo/rhi name the logical
 * low/high halves, so the word offsets swap with host endianness. */
#ifdef HAVE_LITTLE_ENDIAN

#define STORE64_off(rlo,rhi,rbase,off) sw rlo, off(rbase); \
    sw rhi, (off+4)(rbase)
#define LOAD64_off(rlo,rhi,rbase,off) lw rlo, off(rbase); \
    lw rhi, (off+4)(rbase)

#define STORE64_off_F(rlo,rhi,rbase,off) s.s rlo, off(rbase); \
    s.s rhi, (off+4)(rbase)
#define LOAD64_off_F(rlo,rhi,rbase,off) l.s rlo, off(rbase); \
    l.s rhi, (off+4)(rbase)
#else

#define STORE64_off(rlo,rhi,rbase,off) sw rlo, (off+4)(rbase); \
    sw rhi, (off)(rbase)
#define LOAD64_off(rlo,rhi,rbase,off) lw rlo, (off+4)(rbase); \
    lw rhi, (off)(rbase)
#define STORE64_off_F(rlo,rhi,rbase,off) s.s rlo, (off+4)(rbase); \
    s.s rhi, (off)(rbase)
#define LOAD64_off_F(rlo,rhi,rbase,off) l.s rlo, (off+4)(rbase); \
    l.s rhi, (off)(rbase)
#endif

#define STORE64(rlo,rhi,rbase) STORE64_off(rlo,rhi,rbase,0)
#define LOAD64(rlo,rhi,rbase)  LOAD64_off(rlo,rhi,rbase,0)

#define STORE64_F(rlo,rhi,rbase) STORE64_off_F(rlo,rhi,rbase,0)
#define LOAD64_F(rlo,rhi,rbase)  LOAD64_off_F(rlo,rhi,rbase,0)

/* NOTE: fixed memory-order halves, NOT endian-swapped like STORE64 above. */
#define STORE64_lo(rd,rbase) sw rd, 0(rbase)
#define STORE64_hi(rd,rbase) sw rd, 4(rbase)


/* Named field loads: rd = rbase->field, offsets from asm-constants.h */
#define LOAD_offThread_exception(rd,rbase) LOAD_RB_OFF(rd,rbase,offThread_exception)
#define LOAD_base_offArrayObject_length(rd,rbase) LOAD_RB_OFF(rd,rbase,offArrayObject_length)
#define LOAD_base_offClassObject_accessFlags(rd,rbase) LOAD_RB_OFF(rd,rbase,offClassObject_accessFlags)
#define LOAD_base_offClassObject_descriptor(rd,rbase) LOAD_RB_OFF(rd,rbase,offClassObject_descriptor)
#define LOAD_base_offClassObject_super(rd,rbase) LOAD_RB_OFF(rd,rbase,offClassObject_super)

#define LOAD_base_offClassObject_vtable(rd,rbase) LOAD_RB_OFF(rd,rbase,offClassObject_vtable)
#define LOAD_base_offClassObject_vtableCount(rd,rbase) LOAD_RB_OFF(rd,rbase,offClassObject_vtableCount)
#define LOAD_base_offDvmDex_pResClasses(rd,rbase) LOAD_RB_OFF(rd,rbase,offDvmDex_pResClasses)
#define LOAD_base_offDvmDex_pResFields(rd,rbase) LOAD_RB_OFF(rd,rbase,offDvmDex_pResFields)

#define LOAD_base_offDvmDex_pResMethods(rd,rbase) LOAD_RB_OFF(rd,rbase,offDvmDex_pResMethods)
#define LOAD_base_offDvmDex_pResStrings(rd,rbase) LOAD_RB_OFF(rd,rbase,offDvmDex_pResStrings)
#define LOAD_base_offInstField_byteOffset(rd,rbase) LOAD_RB_OFF(rd,rbase,offInstField_byteOffset)
#define LOAD_base_offStaticField_value(rd,rbase) LOAD_RB_OFF(rd,rbase,offStaticField_value)
#define LOAD_base_offMethod_clazz(rd,rbase) LOAD_RB_OFF(rd,rbase,offMethod_clazz)

#define LOAD_base_offMethod_name(rd,rbase) LOAD_RB_OFF(rd,rbase,offMethod_name)
#define LOAD_base_offObject_clazz(rd,rbase) LOAD_RB_OFF(rd,rbase,offObject_clazz)

#define LOADu2_offMethod_methodIndex(rd,rbase) LOADu2_RB_OFF(rd,rbase,offMethod_methodIndex)


#define STORE_offThread_exception(rd,rbase) STORE_RB_OFF(rd,rbase,offThread_exception)


/* Native-stack helpers (sp-relative) */
#define STACK_STORE(rd,off) sw rd, off(sp)
#define STACK_LOAD(rd,off)  lw rd, off(sp)
#define CREATE_STACK(n)     subu sp, sp, n
#define DELETE_STACK(n)     addu sp, sp, n

#define SAVE_RA(offset) STACK_STORE(ra, offset)
#define LOAD_RA(offset) STACK_LOAD(ra, offset)

#define LOAD_ADDR(dest,addr) la dest, addr
#define LOAD_IMM(dest, imm)  li dest, imm
#define MOVE_REG(dest,src)   move dest, src
#define RETURN               jr ra
#define STACK_SIZE           128

#define STACK_OFFSET_ARG04 16
#define STACK_OFFSET_GP    84
#define STACK_OFFSET_rFP   112

/* This directive will make sure all subsequent jal restore gp at a known offset */
    .cprestore STACK_OFFSET_GP
/* Call helpers that preserve ra in rTEMP (the restore lands in the
 * delay slot under .cprestore's gp reload scheduling). */
#define JAL(func) move rTEMP, ra; \
    jal func; \
    move ra, rTEMP

#define JALR(reg) move rTEMP, ra; \
    jalr ra, reg; \
    move ra, rTEMP

#define BAL(n) bal n

/* Incremental callee-save spill families: each level allocates the frame
 * once via STACK_STORE_RA and adds one more register; the matching
 * STACK_LOAD_* must be used in reverse. Fixed offsets: ra=124, fp=120,
 * s0=116, s1=STACK_OFFSET_rFP(112), s2..s7 = 108..88. */
#define STACK_STORE_RA() CREATE_STACK(STACK_SIZE); \
    STACK_STORE(gp, STACK_OFFSET_GP); \
    STACK_STORE(ra, 124)

#define STACK_STORE_S0() STACK_STORE_RA(); \
    STACK_STORE(s0, 116)

#define STACK_STORE_S0S1() STACK_STORE_S0(); \
    STACK_STORE(s1, STACK_OFFSET_rFP)

#define STACK_LOAD_RA() STACK_LOAD(ra, 124); \
    STACK_LOAD(gp, STACK_OFFSET_GP); \
    DELETE_STACK(STACK_SIZE)

#define STACK_LOAD_S0() STACK_LOAD(s0, 116); \
    STACK_LOAD_RA()

#define STACK_LOAD_S0S1() STACK_LOAD(s1, STACK_OFFSET_rFP); \
    STACK_LOAD_S0()

/* Spill/reload every callee-saved register plus ra/fp in one shot. */
#define STACK_STORE_FULL() CREATE_STACK(STACK_SIZE); \
    STACK_STORE(ra, 124); \
    STACK_STORE(fp, 120); \
    STACK_STORE(s0, 116); \
    STACK_STORE(s1, STACK_OFFSET_rFP); \
    STACK_STORE(s2, 108); \
    STACK_STORE(s3, 104); \
    STACK_STORE(s4, 100); \
    STACK_STORE(s5, 96); \
    STACK_STORE(s6, 92); \
    STACK_STORE(s7, 88);

#define STACK_LOAD_FULL() STACK_LOAD(gp, STACK_OFFSET_GP); \
    STACK_LOAD(s7, 88); \
    STACK_LOAD(s6, 92); \
    STACK_LOAD(s5, 96); \
    STACK_LOAD(s4, 100); \
    STACK_LOAD(s3, 104); \
    STACK_LOAD(s2, 108); \
    STACK_LOAD(s1, STACK_OFFSET_rFP); \
    STACK_LOAD(s0, 116); \
    STACK_LOAD(fp, 120); \
    STACK_LOAD(ra, 124); \
    DELETE_STACK(STACK_SIZE)

/*
 * first 8 words are reserved for function calls
 * Maximum offset is STACK_OFFSET_SCRMX-STACK_OFFSET_SCR
 */
#define STACK_OFFSET_SCR 32
#define SCRATCH_STORE(r,off) \
    STACK_STORE(r, STACK_OFFSET_SCR+off);
#define SCRATCH_LOAD(r,off) \
    STACK_LOAD(r, STACK_OFFSET_SCR+off);