/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "handle_scope-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "verifier/method_verifier.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
  : self_(self), context_(self->GetLongJumpContext()), is_deoptimization_(is_deoptimization),
    method_tracing_active_(is_deoptimization ||
                           Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
    handler_quick_frame_(nullptr), handler_quick_frame_pc_(0), handler_method_(nullptr),
    handler_dex_pc_(0), clear_exception_(false), handler_frame_depth_(kInvalidFrameDepth) {
}

// Finds the catch handler or prepares for deoptimization.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, context), self_(self), exception_(exception),
        exception_handler_(exception_handler) {
  }

  bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    mirror::ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; remember the frame and last pc so that we can long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      uint32_t next_dex_pc;
      mirror::ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the unhandled exception
        // case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    StackHandleScope<1> hs(self_);
    return HandleTryItems(hs.NewHandle(method));
  }

 private:
  bool HandleTryItems(Handle<mirror::ArtMethod> method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(Thread::Current());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = mirror::ArtMethod::FindCatchBlock(method, to_find, dex_pc,
                                                                &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method.Get());
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(method->ToNativePc(found_dex_pc));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        return false;  // End stack walk.
      }
    }
    return true;  // Continue stack walk.
  }

  Thread* const self_;
  // The exception for which we are looking for a catch block.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we are filling in during the stack walk.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

void QuickExceptionHandler::FindCatch(const ThrowLocation& throw_location,
                                      mirror::Throwable* exception,
                                      bool is_exception_reported) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find the catch handler or prepare for deoptimization.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (handler_quick_frame_->AsMirrorPtr() == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put the exception back in the root set with a cleared throw location.
    self_->SetException(ThrowLocation(), exception_ref.Get());
    self_->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
  // The debugger may suspend this thread and walk its stack. Let's do this before popping
  // instrumentation frames.
  if (!is_exception_reported) {
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    instrumentation->ExceptionCaughtEvent(self_, throw_location, handler_method_, handler_dex_pc_,
                                          exception_ref.Get());
    // We're not catching this exception, but record that it has already been reported above so
    // we avoid reporting it twice.
    self_->SetExceptionReportedToInstrumentation(true);
  }
  bool caught_exception = (handler_method_ != nullptr && handler_dex_pc_ != DexFile::kDexNoIndex);
  if (caught_exception) {
    // We're catching this exception, so we finish reporting it. We do it here to avoid doing it
    // in the compiled code.
    self_->SetExceptionReportedToInstrumentation(false);
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self, Context* context, QuickExceptionHandler* exception_handler)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, context), self_(self), exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr) {
    CHECK(!self_->HasDeoptimizationShadowFrame());
  }

  bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    mirror::ArtMethod* method = GetMethod();
    if (method == nullptr) {
      // This is the upcall; remember the frame and last pc so that we can long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else {
      return HandleDeoptimization(method);
    }
  }

 private:
  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  bool HandleDeoptimization(mirror::ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    const DexFile::CodeItem* code_item = m->GetCodeItem();
    CHECK(code_item != nullptr);
    uint16_t num_regs = code_item->registers_size_;
    uint32_t dex_pc = GetDexPc();
    const Instruction* inst = Instruction::At(code_item->insns_ + dex_pc);
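    // Interpretation will resume at the instruction following the one at |dex_pc|; the current
    // instruction has already been executed by the compiled code.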
    uint32_t new_dex_pc = dex_pc + inst->SizeInCodeUnits();
    ShadowFrame* new_frame = ShadowFrame::Create(num_regs, nullptr, m, new_dex_pc);
    StackHandleScope<2> hs(self_);
    mirror::Class* declaring_class = m->GetDeclaringClass();
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
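    // Re-verify the method so the verifier can describe the kind of each vreg at |dex_pc|; this
    // drives the copy of register values from the quick frame into the shadow frame below.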
    verifier::MethodVerifier verifier(h_dex_cache->GetDexFile(), &h_dex_cache, &h_class_loader,
                                      &m->GetClassDef(), code_item, m->GetDexMethodIndex(), m,
                                      m->GetAccessFlags(), false, true, true);
    verifier.Verify();
    const std::vector<int32_t> kinds(verifier.DescribeVRegs(dex_pc));
    for (uint16_t reg = 0; reg < num_regs; ++reg) {
      VRegKind kind = GetVRegKind(reg, kinds);
      switch (kind) {
        case kUndefined:
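          // The register holds no useful value here; fill it with a recognizable marker constant.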
          new_frame->SetVReg(reg, 0xEBADDE09);
          break;
        case kConstant:
          new_frame->SetVReg(reg, kinds.at((reg * 2) + 1));
          break;
        case kReferenceVReg:
          new_frame->SetVRegReference(reg,
                                      reinterpret_cast<mirror::Object*>(GetVReg(m, reg, kind)));
          break;
        case kLongLoVReg:
          if (GetVRegKind(reg + 1, kinds) == kLongHiVReg) {
            // Treat it as a "long" register pair.
            new_frame->SetVRegLong(reg, GetVRegPair(m, reg, kLongLoVReg, kLongHiVReg));
          } else {
            new_frame->SetVReg(reg, GetVReg(m, reg, kind));
          }
          break;
        case kLongHiVReg:
          if (GetVRegKind(reg - 1, kinds) == kLongLoVReg) {
            // Nothing to do: we treated it as a "long" register pair.
          } else {
            new_frame->SetVReg(reg, GetVReg(m, reg, kind));
          }
          break;
        case kDoubleLoVReg:
          if (GetVRegKind(reg + 1, kinds) == kDoubleHiVReg) {
            // Treat it as a "double" register pair.
            new_frame->SetVRegLong(reg, GetVRegPair(m, reg, kDoubleLoVReg, kDoubleHiVReg));
          } else {
            new_frame->SetVReg(reg, GetVReg(m, reg, kind));
          }
          break;
        case kDoubleHiVReg:
          if (GetVRegKind(reg - 1, kinds) == kDoubleLoVReg) {
            // Nothing to do: we treated it as a "double" register pair.
          } else {
            new_frame->SetVReg(reg, GetVReg(m, reg, kind));
          }
          break;
        default:
          new_frame->SetVReg(reg, GetVReg(m, reg, kind));
          break;
      }
    }
    if (prev_shadow_frame_ != nullptr) {
      prev_shadow_frame_->SetLink(new_frame);
    } else {
      self_->SetDeoptimizationShadowFrame(new_frame);
    }
    prev_shadow_frame_ = new_frame;
    return true;
  }

  Thread* const self_;
  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this);
  visitor.WalkStack(true);

  // Restore the deoptimization exception.
  self_->SetException(ThrowLocation(), Thread::GetDeoptimizationException());
}

// Unwinds all instrumentation stack frames prior to the catch handler or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, bool is_deoptimization, size_t frame_depth)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr),
        self_(self), frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
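      // A return PC pointing at the instrumentation exit stub marks an instrumentation frame that
      // the long jump will skip, so count it for popping.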
      if (UNLIKELY(GetQuickInstrumentationExitPc() == GetReturnPc())) {
        ++instrumentation_frames_to_pop_;
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  Thread* const self_;
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

void QuickExceptionHandler::UpdateInstrumentationStack() {
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, is_deoptimization_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
}

void QuickExceptionHandler::DoLongJump() {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
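  // Clear the caller-save registers in the context so stale values are not carried across the
  // long jump.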
  context_->SmashCallerSaves();
  context_->DoLongJump();
}

}  // namespace art
