interpreter.cc revision 504a69081f63818ca332ddaf54e8198448554538
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>

#include "common_throws.h"
#include "interpreter_common.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"
#include "ScopedLocalRef.h"
#include "stack.h"
#include "unstarted_runtime.h"
#include "mterp/mterp.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"

namespace art {
namespace interpreter {

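// Hand-rolled JNI dispatch used when the interpreter is asked to run a native method directly
// (see EnterInterpreterFromInvoke below). Only the fixed set of shortys handled here is
// supported; anything else aborts with LOG(FATAL). As the TODO notes, this path should
// eventually be replaced by JNI-compiled stubs.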
static void InterpreterJni(Thread* self, ArtMethod* method, const StringPiece& shorty,
                           Object* receiver, uint32_t* args, JValue* result)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  //       it should be removed and JNI-compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      typedef jboolean (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      typedef jbyte (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      typedef jint (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "IIZ") {
      typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      typedef jint (fntype)(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << PrettyMethod(method)
          << " shorty: " << shorty;
    }
  } else {
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "III") {
      typedef jint (fntype)(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << PrettyMethod(method)
          << " shorty: " << shorty;
    }
  }
}

enum InterpreterImplKind {
  kSwitchImplKind,        // Switch-based interpreter implementation.
  kComputedGotoImplKind,  // Computed-goto-based interpreter implementation.
  kMterpImplKind          // Assembly interpreter
};
static std::ostream& operator<<(std::ostream& os, const InterpreterImplKind& rhs) {
  os << ((rhs == kSwitchImplKind)
              ? "Switch-based interpreter"
              : (rhs == kComputedGotoImplKind)
                  ? "Computed-goto-based interpreter"
                  : "Asm interpreter");
  return os;
}

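// kMterpImplKind selects the hand-written assembly interpreter (mterp). Execute() below still
// falls back to the switch-based implementation whenever mterp cannot be used, e.g. for active
// transactions, an unstarted runtime, or instructions mterp does not support under the current
// instrumentation/debugging state.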
static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind;

#if defined(__clang__)
// Clang 3.4 fails to build the goto interpreter implementation.
template<bool do_access_check, bool transaction_active>
JValue ExecuteGotoImpl(Thread*, const DexFile::CodeItem*, ShadowFrame&, JValue) {
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
// Explicit declarations of the ExecuteGotoImpl specializations.
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, false>(Thread* self, const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, false>(Thread* self, const DexFile::CodeItem* code_item,
                                     ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, true>(Thread* self, const DexFile::CodeItem* code_item,
                                   ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, true>(Thread* self, const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
#endif

static JValue Execute(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame& shadow_frame,
                      JValue result_register)
    SHARED_REQUIRES(Locks::mutator_lock_);

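// Interprets the method held by |shadow_frame|. On a fresh entry (dex pc 0) this reports the
// method-entry event to instrumentation listeners, gives the JIT a chance to run already-compiled
// code instead, and then dispatches to the interpreter implementation selected above.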
static inline JValue Execute(Thread* self, const DexFile::CodeItem* code_item,
                             ShadowFrame& shadow_frame, JValue result_register) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());
  if (LIKELY(shadow_frame.GetDexPC() == 0)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      self->AssertNoPendingException();
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    ArtMethod* method = shadow_frame.GetMethod();

    if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
      instrumentation->MethodEnterEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
                                        method, 0);
    }

    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      jit->MethodEntered(self, shadow_frame.GetMethod());
      if (jit->CanInvokeCompiledCode(method)) {
        JValue result;

        // Pop the shadow frame before calling into compiled code.
        self->PopShadowFrame();
        ArtInterpreterToCompiledCodeBridge(self, nullptr, code_item, &shadow_frame, &result);
        // Push the shadow frame back as the caller will expect it.
        self->PushShadowFrame(&shadow_frame);

        return result;
      }
    }
  }

  shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  ArtMethod* method = shadow_frame.GetMethod();
  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());

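  // The interpreter implementations are templated on <do_access_check, transaction_active>.
  // Mterp has no access-check or transaction variants, so those cases (and an unstarted runtime
  // or instructions mterp cannot handle) fall back to the switch interpreter.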
  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  if (LIKELY(method->SkipAccessChecks())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      if (transaction_active) {
        // No Mterp variant - just use the switch interpreter.
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register,
                                              false);
      } else if (UNLIKELY(!Runtime::Current()->IsStarted())) {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                               false);
      } else {
        while (true) {
          // Mterp does not support all instrumentation/debugging.
          if (MterpShouldSwitchInterpreters()) {
            return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                                   false);
          }
          bool returned = ExecuteMterpImpl(self, code_item, &shadow_frame, &result_register);
          if (returned) {
            return result_register;
          } else {
            // Mterp didn't like that instruction.  Single-step it with the reference interpreter.
            result_register = ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame,
                                                              result_register, true);
            if (shadow_frame.GetDexPC() == DexFile::kDexNoIndex) {
              // Single-stepped a return or an exception not handled locally.  Return to caller.
              return result_register;
            }
          }
        }
      }
    } else if (kInterpreterImplKind == kSwitchImplKind) {
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register,
                                              false);
      } else {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                               false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<false, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<false, false>(self, code_item, shadow_frame, result_register);
      }
    }
  } else {
    // Enter the "with access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      // No access check variants for Mterp.  Just use the switch version.
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register,
                                              false);
      }
    } else if (kInterpreterImplKind == kSwitchImplKind) {
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register,
                                              false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<true, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<true, false>(self, code_item, shadow_frame, result_register);
      }
    }
  }
}

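// Builds a shadow frame for |method|, copies the incoming argument words into its vregs according
// to the method's shorty, makes sure the declaring class of a static method is initialized, and
// then interprets the method (native methods are handed to UnstartedRuntime::Jni or
// InterpreterJni instead).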
void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver,
                                uint32_t* args, JValue* result) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  uint16_t num_regs;
  uint16_t num_ins;
  if (code_item != nullptr) {
    num_regs = code_item->registers_size_;
    num_ins = code_item->ins_size_;
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError();
    return;
  } else {
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up a shadow frame with a number of reference slots matching the vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  self->PushShadowFrame(shadow_frame);

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
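  // Copy the argument words into the 'in' vregs. Each shorty character after the return type
  // describes one argument: references become vreg references, longs/doubles take two consecutive
  // vregs and two argument slots, and everything else is a single 32-bit vreg.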
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        Object* o = reinterpret_cast<StackReference<Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, code_item, *shadow_frame, JValue());
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver, args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

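// Re-enters the interpreter for the chain of shadow frames produced by deoptimization. For each
// frame the dex pc to resume at is: the matching catch handler when an exception is pending, just
// past the invoke (or String NEW_INSTANCE) that already executed when deoptimizing for the
// debugger/full-deoptimization case, or the recorded dex pc itself when compiled code explicitly
// requested the deoptimization.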
void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    bool from_code,
                                    JValue* ret_val)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  JValue value;
  // Set value to the last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // Are we executing the first shadow frame?
  bool first = true;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    const DexFile::CodeItem* code_item = shadow_frame->GetMethod()->GetCodeItem();
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
      // the instrumentation. To prevent reporting it a second time, we simply pass a
      // null Instrumentation*.
      const instrumentation::Instrumentation* const instrumentation =
          first ? nullptr : Runtime::Current()->GetInstrumentation();
      uint32_t found_dex_pc = FindNextInstructionFollowingException(self, *shadow_frame, dex_pc,
                                                                    instrumentation);
      new_dex_pc = found_dex_pc;  // the dex pc of a matching catch handler
                                  // or DexFile::kDexNoIndex if there is none.
    } else if (!from_code) {
      // For the debugger and full deoptimization stack, we must go past the invoke
      // instruction, as it already executed.
      // TODO: should be tested more once b/17586779 is fixed.
      const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
      if (instr->IsInvoke()) {
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // It's possible to deoptimize at a NEW_INSTANCE dex instruction for java.lang.String,
        // which is turned into a call to StringFactory.newEmptyString().
        if (kIsDebugBuild) {
          ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
          mirror::Class* klass = class_linker->ResolveType(
              instr->VRegB_21c(), shadow_frame->GetMethod());
          DCHECK(klass->IsStringClass());
        }
        // Skip the dex instruction since we essentially come back from an invocation.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else {
        DCHECK(false) << "Unexpected instruction opcode " << instr->Opcode()
                      << " at dex_pc " << dex_pc
                      << " of method: " << PrettyMethod(shadow_frame->GetMethod(), false);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
    }
    if (new_dex_pc != DexFile::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self, code_item, *shadow_frame, value);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // All following deoptimized shadow frames must resume past their invoke instruction.
    from_code = false;
    first = false;
  }
  ret_val->SetJ(value.GetJ());
}

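// Used when execution enters the interpreter through the interpreter entry point with a shadow
// frame the caller has already populated: check for stack overflow, let the JIT record the
// compiled-code -> interpreter transition, and execute.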
JValue EnterInterpreterFromEntryPoint(Thread* self, const DexFile::CodeItem* code_item,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, code_item, *shadow_frame, JValue());
}

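// Interpreter-to-interpreter call bridge: the callee's shadow frame has already been populated by
// the caller, so this only pushes it, ensures a static method's declaring class is initialized,
// and runs the callee (native methods are only expected here for the unstarted runtime and go to
// UnstartedRuntime::Jni).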
void ArtInterpreterToInterpreterBridge(Thread* self, const DexFile::CodeItem* code_item,
                                       ShadowFrame* shadow_frame, JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods' declaring classes are initialized.
  const bool is_static = method->IsStatic();
  if (is_static) {
    mirror::Class* declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      HandleWrapper<Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          self, h_declaring_class, true, true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, code_item, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    Object* receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver, args, result);
  }

  self->PopShadowFrame();
}

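// The mterp assembly interpreter bakes in structure offsets and sizes; these wrappers expose
// mterp's constant check and per-thread initialization (CheckMterpAsmConstants / InitMterpTls)
// to the rest of the runtime.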
void CheckInterpreterAsmConstants() {
  CheckMterpAsmConstants();
}

void InitInterpreterTls(Thread* self) {
  InitMterpTls(self);
}

}  // namespace interpreter
}  // namespace art