portable_trampoline_entrypoints.cc revision ea46f950e7a51585db293cd7f047de190a482414
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// NOTE(review): the guard names below refer to portable_argument_visitor.h
// even though this content is trampoline entrypoints — presumably a
// historical artifact of a file move; confirm before renaming.
#ifndef ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_
#define ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_

#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/object-inl.h"
#include "object_utils.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
//
// Subclasses implement Visit(), which is invoked once per method parameter;
// inside Visit() the current parameter's type can be queried via
// GetParamPrimitiveType()/IsParamAReference()/IsParamALongOrDouble() and its
// storage located via GetParamAddress().
class PortableArgumentVisitor {
 public:
// Offset to first (not the Method*) argument in a Runtime::kRefAndArgs callee save frame.
// Size of Runtime::kRefAndArgs callee save frame.
// Size of Method* and register parameters in out stack arguments.
#if defined(__arm__)
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 8
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 48
#define PORTABLE_STACK_ARG_SKIP 0
#elif defined(__mips__)
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 64
#define PORTABLE_STACK_ARG_SKIP 16
#elif defined(__i386__)
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 32
#define PORTABLE_STACK_ARG_SKIP 4
#else
#error "Unsupported architecture"
// Fallback values keep the file parseable after the #error fires.
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
#define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
#define PORTABLE_STACK_ARG_SKIP 0
#endif

  // caller_mh describes the method whose arguments are being visited; sp
  // points at the Method* slot of the Runtime::kRefAndArgs callee save frame,
  // so register-resident arguments live at sp + R1_OFFSET and stack-resident
  // arguments begin past the frame (plus the per-arch skip of the Method* and
  // register-parameter home space).
  PortableArgumentVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
    caller_mh_(caller_mh),
    args_in_regs_(ComputeArgsInRegs(caller_mh)),
    num_params_(caller_mh.NumArgs()),
    reg_args_(reinterpret_cast<byte*>(sp) + PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET),
    stack_args_(reinterpret_cast<byte*>(sp) + PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE
                + PORTABLE_STACK_ARG_SKIP),
    cur_args_(reg_args_),
    cur_arg_index_(0),
    param_index_(0) {
  }

  virtual ~PortableArgumentVisitor() {}

  // Called once per parameter; implemented by subclasses.
  virtual void Visit() = 0;

  bool IsParamAReference() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return caller_mh_.IsParamAReference(param_index_);
  }

  bool IsParamALongOrDouble() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return caller_mh_.IsParamALongOrDouble(param_index_);
  }

  Primitive::Type GetParamPrimitiveType() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return caller_mh_.GetParamPrimitiveType(param_index_);
  }

  // Address of the current parameter within either the register-save area or
  // the outgoing stack argument area (cur_args_ switches between the two).
  byte* GetParamAddress() const {
    return cur_args_ + (cur_arg_index_ * kPointerSize);
  }

  // Walks all parameters in two phases: first those saved from argument
  // registers, then those passed on the stack.
  void VisitArguments()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    for (cur_arg_index_ = 0; cur_arg_index_ < args_in_regs_ && param_index_ < num_params_; ) {
#if (defined(__arm__) || defined(__mips__))
      // A long/double starting at register slot 2 would not fit in the
      // remaining register pair, so the rest goes to the stack phase.
      // Presumably this mirrors the calling convention's register pairing
      // rules for 64-bit values — confirm against the portable ABI.
      if (IsParamALongOrDouble() && cur_arg_index_ == 2) {
        break;
      }
#endif
      Visit();
      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
      param_index_++;
    }
    // Switch to the stack-resident arguments and restart slot indexing.
    cur_args_ = stack_args_;
    cur_arg_index_ = 0;
    while (param_index_ < num_params_) {
#if (defined(__arm__) || defined(__mips__))
      // Skip a slot so a long/double starts on an even slot (8-byte aligned).
      if (IsParamALongOrDouble() && cur_arg_index_ % 2 != 0) {
        cur_arg_index_++;
      }
#endif
      Visit();
      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
      param_index_++;
    }
  }

 private:
  // Number of argument register slots the parameters occupy, capped at 3
  // (longs/doubles take two slots). On x86 all arguments are on the stack.
  static size_t ComputeArgsInRegs(MethodHelper& mh) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__i386__))
    return 0;
#else
    size_t args_in_regs = 0;
    size_t num_params = mh.NumArgs();
    for (size_t i = 0; i < num_params; i++) {
      args_in_regs = args_in_regs + (mh.IsParamALongOrDouble(i) ? 2 : 1);
      if (args_in_regs > 3) {
        args_in_regs = 3;
        break;
      }
    }
    return args_in_regs;
#endif
  }
  MethodHelper& caller_mh_;
  const size_t args_in_regs_;   // Register slots used by arguments (<= 3).
  const size_t num_params_;     // Total parameter count (including receiver for non-static).
  byte* const reg_args_;        // Start of the register-save area in the frame.
  byte* const stack_args_;      // Start of the caller's outgoing stack arguments.
  byte* cur_args_;              // Area currently being walked (reg_args_ then stack_args_).
  size_t cur_arg_index_;        // Slot index within cur_args_.
  size_t param_index_;          // Index of the parameter currently visited.
};

// Visits arguments on the stack placing them into the shadow frame.
class BuildPortableShadowFrameVisitor : public PortableArgumentVisitor {
 public:
  BuildPortableShadowFrameVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp,
                                  ShadowFrame& sf, size_t first_arg_reg) :
    PortableArgumentVisitor(caller_mh, sp), sf_(sf), cur_reg_(first_arg_reg) { }
  // Copies the current argument into the shadow frame's vregs; longs/doubles
  // occupy two vregs (one extra ++cur_reg_ in their case plus the shared one
  // at the end).
  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Primitive::Type type = GetParamPrimitiveType();
    switch (type) {
      case Primitive::kPrimLong:  // Fall-through.
      case Primitive::kPrimDouble:
        sf_.SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
        ++cur_reg_;
        break;
      case Primitive::kPrimNot:
        sf_.SetVRegReference(cur_reg_, *reinterpret_cast<mirror::Object**>(GetParamAddress()));
        break;
      case Primitive::kPrimBoolean:  // Fall-through.
      case Primitive::kPrimByte:     // Fall-through.
      case Primitive::kPrimChar:     // Fall-through.
      case Primitive::kPrimShort:    // Fall-through.
      case Primitive::kPrimInt:      // Fall-through.
      case Primitive::kPrimFloat:
        sf_.SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
        break;
      case Primitive::kPrimVoid:
        LOG(FATAL) << "UNREACHABLE";
        break;
    }
    ++cur_reg_;
  }

 private:
  ShadowFrame& sf_;   // Destination shadow frame.
  size_t cur_reg_;    // Next vreg to fill.

  DISALLOW_COPY_AND_ASSIGN(BuildPortableShadowFrameVisitor);
};

// Bridge from portable-compiled code into the interpreter: builds a shadow
// frame from the callee save frame's arguments, ensures the declaring class
// of a static method is initialized, then interprets the method. Returns the
// method's result as a raw 64-bit value (result.GetJ()).
extern "C" uint64_t artPortableToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                   mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  // FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
    MethodHelper mh(method);
    const DexFile::CodeItem* code_item = mh.GetCodeItem();
    uint16_t num_regs = code_item->registers_size_;
    // Shadow frame is stack-allocated; it lives only for this invocation.
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
                                                  method, 0, memory));
    // Incoming arguments occupy the last ins_size_ registers of the frame.
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    BuildPortableShadowFrameVisitor shadow_frame_builder(mh, sp,
                                                         *shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
      // Ensure static method's class is initialized.
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(method->GetDeclaringClass(),
                                                                   true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    return result.GetJ();
  }
}

// Visits arguments on the stack placing them into the args vector, Object* arguments are converted
// to jobjects.
class BuildPortableArgumentVisitor : public PortableArgumentVisitor {
 public:
  BuildPortableArgumentVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp,
                               ScopedObjectAccessUnchecked& soa, std::vector<jvalue>& args) :
    PortableArgumentVisitor(caller_mh, sp), soa_(soa), args_(args) {}

  // Appends the current argument to args_; references are wrapped in local
  // JNI references so they survive GC.
  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    jvalue val;
    Primitive::Type type = GetParamPrimitiveType();
    switch (type) {
      case Primitive::kPrimNot: {
        mirror::Object* obj = *reinterpret_cast<mirror::Object**>(GetParamAddress());
        val.l = soa_.AddLocalReference<jobject>(obj);
        break;
      }
      case Primitive::kPrimLong:  // Fall-through.
      case Primitive::kPrimDouble:
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
        break;
      case Primitive::kPrimBoolean:  // Fall-through.
      case Primitive::kPrimByte:     // Fall-through.
      case Primitive::kPrimChar:     // Fall-through.
      case Primitive::kPrimShort:    // Fall-through.
      case Primitive::kPrimInt:      // Fall-through.
      case Primitive::kPrimFloat:
        val.i = *reinterpret_cast<jint*>(GetParamAddress());
        break;
      case Primitive::kPrimVoid:
        LOG(FATAL) << "UNREACHABLE";
        // Unreachable; initialize val to silence used-uninitialized warnings.
        val.j = 0;
        break;
    }
    args_.push_back(val);
  }

 private:
  ScopedObjectAccessUnchecked& soa_;  // For creating local references.
  std::vector<jvalue>& args_;         // Output argument vector.

  DISALLOW_COPY_AND_ASSIGN(BuildPortableArgumentVisitor);
};

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artPortableProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                                  mirror::Object* receiver,
                                                  Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Placing arguments into args vector and remove the receiver.
  MethodHelper proxy_mh(proxy_method);
  std::vector<jvalue> args;
  BuildPortableArgumentVisitor local_ref_visitor(proxy_mh, sp, soa, args);
  local_ref_visitor.VisitArguments();
  // The first visited argument is the receiver; it is passed separately as
  // rcvr_jobj, so drop it from the vector.
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so its safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
                                               rcvr_jobj, interface_method_jobj, args);
  return result.GetJ();
}

// Lazily resolve a method for portable. Called by stub code.
// If 'called' is the runtime resolution method, the actual callee is
// recovered by decoding the invoke-* instruction at the caller's dex pc and
// resolving its method index; for virtual/interface invokes the result is
// further refined by the receiver's class. Returns the code to invoke, or
// NULL (e.g. pending exception, erroneous class). On success, writes the
// resolved method back into the frame's Method* slot.
extern "C" const void* artPortableResolutionTrampoline(mirror::ArtMethod* called,
                                                       mirror::Object* receiver,
                                                       Thread* thread,
                                                       mirror::ArtMethod** called_addr)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t dex_pc;
  mirror::ArtMethod* caller = thread->GetCurrentMethod(&dex_pc);

  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  InvokeType invoke_type;
  bool is_range;
  if (called->IsRuntimeMethod()) {
    // Determine the invoke type/method index from the caller's bytecode.
    const DexFile::CodeItem* code = MethodHelper(caller).GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid used uninitialized warnings.
        invoke_type = kDirect;
        is_range = true;
    }
    // Method index lives in vB of the 3rc (range) or 35c format.
    uint32_t dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
    called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type));
    // Refine called method based on receiver.
    if (invoke_type == kVirtual) {
      called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
    } else if (invoke_type == kInterface) {
      called = receiver->GetClass()->FindVirtualMethodForInterface(called);
    }
  } else {
    // Already-resolved callee: only static methods reach here directly.
    CHECK(called->IsStatic()) << PrettyMethod(called);
    invoke_type = kStatic;
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type));
  }
  const void* code = NULL;
  if (LIKELY(!thread->IsExceptionPending())) {
    // Ensure that the called method's class is initialized.
    mirror::Class* called_class = called->GetDeclaringClass();
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromCompiledCode();
      // TODO: remove this after we solve the link issue.
      {  // for lazy link.
        if (code == NULL) {
          code = linker->GetOatCodeFor(called);
        }
      }
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromCompiledCode();
        // TODO: remove this after we solve the link issue.
        {  // for lazy link.
          if (code == NULL) {
            code = linker->GetOatCodeFor(called);
          }
        }
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  if (LIKELY(code != NULL)) {
    // Expect class to at least be initializing.
    DCHECK(called->GetDeclaringClass()->IsInitializing());
    // Don't want infinite recursion.
    DCHECK(code != GetResolutionTrampoline(linker));
    // Set up entry into main method
    *called_addr = called;
  }
  return code;
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_