// stack.cc — revision ef7d42fca18c16fbaf103822ad16f23246e2905d
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "stack.h" 18 19#include "mirror/art_method-inl.h" 20#include "mirror/class-inl.h" 21#include "mirror/object.h" 22#include "mirror/object-inl.h" 23#include "mirror/object_array-inl.h" 24#include "object_utils.h" 25#include "runtime.h" 26#include "thread_list.h" 27#include "throw_location.h" 28#include "vmap_table.h" 29 30namespace art { 31 32bool ShadowFrame::VerifyReference(const mirror::Object* val) const { 33 return !Runtime::Current()->GetHeap()->IsInTempSpace(val); 34} 35 36mirror::Object* ShadowFrame::GetThisObject() const { 37 mirror::ArtMethod* m = GetMethod(); 38 if (m->IsStatic()) { 39 return NULL; 40 } else if (m->IsNative()) { 41 return GetVRegReference(0); 42 } else { 43 const DexFile::CodeItem* code_item = MethodHelper(m).GetCodeItem(); 44 CHECK(code_item != NULL) << PrettyMethod(m); 45 uint16_t reg = code_item->registers_size_ - code_item->ins_size_; 46 return GetVRegReference(reg); 47 } 48} 49 50mirror::Object* ShadowFrame::GetThisObject(uint16_t num_ins) const { 51 mirror::ArtMethod* m = GetMethod(); 52 if (m->IsStatic()) { 53 return NULL; 54 } else { 55 return GetVRegReference(NumberOfVRegs() - num_ins); 56 } 57} 58 59ThrowLocation ShadowFrame::GetCurrentLocationForThrow() const { 60 return ThrowLocation(GetThisObject(), GetMethod(), GetDexPC()); 61} 62 63size_t ManagedStack::NumJniShadowFrameReferences() const 
{ 64 size_t count = 0; 65 for (const ManagedStack* current_fragment = this; current_fragment != NULL; 66 current_fragment = current_fragment->GetLink()) { 67 for (ShadowFrame* current_frame = current_fragment->top_shadow_frame_; current_frame != NULL; 68 current_frame = current_frame->GetLink()) { 69 if (current_frame->GetMethod()->IsNative()) { 70 // The JNI ShadowFrame only contains references. (For indirect reference.) 71 count += current_frame->NumberOfVRegs(); 72 } 73 } 74 } 75 return count; 76} 77 78bool ManagedStack::ShadowFramesContain(StackReference<mirror::Object>* shadow_frame_entry) const { 79 for (const ManagedStack* current_fragment = this; current_fragment != NULL; 80 current_fragment = current_fragment->GetLink()) { 81 for (ShadowFrame* current_frame = current_fragment->top_shadow_frame_; current_frame != NULL; 82 current_frame = current_frame->GetLink()) { 83 if (current_frame->Contains(shadow_frame_entry)) { 84 return true; 85 } 86 } 87 } 88 return false; 89} 90 91StackVisitor::StackVisitor(Thread* thread, Context* context) 92 : thread_(thread), cur_shadow_frame_(NULL), 93 cur_quick_frame_(NULL), cur_quick_frame_pc_(0), num_frames_(0), cur_depth_(0), 94 context_(context) { 95 DCHECK(thread == Thread::Current() || thread->IsSuspended()) << *thread; 96} 97 98uint32_t StackVisitor::GetDexPc() const { 99 if (cur_shadow_frame_ != NULL) { 100 return cur_shadow_frame_->GetDexPC(); 101 } else if (cur_quick_frame_ != NULL) { 102 return GetMethod()->ToDexPc(cur_quick_frame_pc_); 103 } else { 104 return 0; 105 } 106} 107 108mirror::Object* StackVisitor::GetThisObject() const { 109 mirror::ArtMethod* m = GetMethod(); 110 if (m->IsStatic()) { 111 return NULL; 112 } else if (m->IsNative()) { 113 if (cur_quick_frame_ != NULL) { 114 StackIndirectReferenceTable* sirt = 115 reinterpret_cast<StackIndirectReferenceTable*>( 116 reinterpret_cast<char*>(cur_quick_frame_) + 117 m->GetSirtOffsetInBytes()); 118 return sirt->GetReference(0); 119 } else { 120 return 
cur_shadow_frame_->GetVRegReference(0); 121 } 122 } else { 123 const DexFile::CodeItem* code_item = MethodHelper(m).GetCodeItem(); 124 if (code_item == NULL) { 125 UNIMPLEMENTED(ERROR) << "Failed to determine this object of abstract or proxy method" 126 << PrettyMethod(m); 127 return NULL; 128 } else { 129 uint16_t reg = code_item->registers_size_ - code_item->ins_size_; 130 return reinterpret_cast<mirror::Object*>(GetVReg(m, reg, kReferenceVReg)); 131 } 132 } 133} 134 135size_t StackVisitor::GetNativePcOffset() const { 136 DCHECK(!IsShadowFrame()); 137 return GetMethod()->NativePcOffset(cur_quick_frame_pc_); 138} 139 140uint32_t StackVisitor::GetVReg(mirror::ArtMethod* m, uint16_t vreg, VRegKind kind) const { 141 if (cur_quick_frame_ != NULL) { 142 DCHECK(context_ != NULL); // You can't reliably read registers without a context. 143 DCHECK(m == GetMethod()); 144 const VmapTable vmap_table(m->GetVmapTable()); 145 uint32_t vmap_offset; 146 // TODO: IsInContext stops before spotting floating point registers. 147 if (vmap_table.IsInContext(vreg, kind, &vmap_offset)) { 148 bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg); 149 uint32_t spill_mask = is_float ? m->GetFpSpillMask() 150 : m->GetCoreSpillMask(); 151 return GetGPR(vmap_table.ComputeRegister(spill_mask, vmap_offset, kind)); 152 } else { 153 const DexFile::CodeItem* code_item = MethodHelper(m).GetCodeItem(); 154 DCHECK(code_item != NULL) << PrettyMethod(m); // Can't be NULL or how would we compile its instructions? 
155 size_t frame_size = m->GetFrameSizeInBytes(); 156 return *GetVRegAddr(cur_quick_frame_, code_item, m->GetCoreSpillMask(), m->GetFpSpillMask(), 157 frame_size, vreg); 158 } 159 } else { 160 return cur_shadow_frame_->GetVReg(vreg); 161 } 162} 163 164void StackVisitor::SetVReg(mirror::ArtMethod* m, uint16_t vreg, uint32_t new_value, 165 VRegKind kind) { 166 if (cur_quick_frame_ != NULL) { 167 DCHECK(context_ != NULL); // You can't reliably write registers without a context. 168 DCHECK(m == GetMethod()); 169 const VmapTable vmap_table(m->GetVmapTable()); 170 uint32_t vmap_offset; 171 // TODO: IsInContext stops before spotting floating point registers. 172 if (vmap_table.IsInContext(vreg, kind, &vmap_offset)) { 173 bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg); 174 uint32_t spill_mask = is_float ? m->GetFpSpillMask() : m->GetCoreSpillMask(); 175 const uint32_t reg = vmap_table.ComputeRegister(spill_mask, vmap_offset, kReferenceVReg); 176 SetGPR(reg, new_value); 177 } else { 178 const DexFile::CodeItem* code_item = MethodHelper(m).GetCodeItem(); 179 DCHECK(code_item != NULL) << PrettyMethod(m); // Can't be NULL or how would we compile its instructions? 
180 uint32_t core_spills = m->GetCoreSpillMask(); 181 uint32_t fp_spills = m->GetFpSpillMask(); 182 size_t frame_size = m->GetFrameSizeInBytes(); 183 int offset = GetVRegOffset(code_item, core_spills, fp_spills, frame_size, vreg); 184 byte* vreg_addr = reinterpret_cast<byte*>(GetCurrentQuickFrame()) + offset; 185 *reinterpret_cast<uint32_t*>(vreg_addr) = new_value; 186 } 187 } else { 188 return cur_shadow_frame_->SetVReg(vreg, new_value); 189 } 190} 191 192uintptr_t StackVisitor::GetGPR(uint32_t reg) const { 193 DCHECK(cur_quick_frame_ != NULL) << "This is a quick frame routine"; 194 return context_->GetGPR(reg); 195} 196 197void StackVisitor::SetGPR(uint32_t reg, uintptr_t value) { 198 DCHECK(cur_quick_frame_ != NULL) << "This is a quick frame routine"; 199 context_->SetGPR(reg, value); 200} 201 202uintptr_t StackVisitor::GetReturnPc() const { 203 mirror::ArtMethod** sp = GetCurrentQuickFrame(); 204 DCHECK(sp != NULL); 205 byte* pc_addr = reinterpret_cast<byte*>(sp) + GetMethod()->GetReturnPcOffsetInBytes(); 206 return *reinterpret_cast<uintptr_t*>(pc_addr); 207} 208 209void StackVisitor::SetReturnPc(uintptr_t new_ret_pc) { 210 mirror::ArtMethod** sp = GetCurrentQuickFrame(); 211 CHECK(sp != NULL); 212 byte* pc_addr = reinterpret_cast<byte*>(sp) + GetMethod()->GetReturnPcOffsetInBytes(); 213 *reinterpret_cast<uintptr_t*>(pc_addr) = new_ret_pc; 214} 215 216size_t StackVisitor::ComputeNumFrames(Thread* thread) { 217 struct NumFramesVisitor : public StackVisitor { 218 explicit NumFramesVisitor(Thread* thread) 219 : StackVisitor(thread, NULL), frames(0) {} 220 221 virtual bool VisitFrame() { 222 frames++; 223 return true; 224 } 225 226 size_t frames; 227 }; 228 NumFramesVisitor visitor(thread); 229 visitor.WalkStack(true); 230 return visitor.frames; 231} 232 233void StackVisitor::DescribeStack(Thread* thread) { 234 struct DescribeStackVisitor : public StackVisitor { 235 explicit DescribeStackVisitor(Thread* thread) 236 : StackVisitor(thread, NULL) {} 237 238 virtual 
bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 239 LOG(INFO) << "Frame Id=" << GetFrameId() << " " << DescribeLocation(); 240 return true; 241 } 242 }; 243 DescribeStackVisitor visitor(thread); 244 visitor.WalkStack(true); 245} 246 247std::string StackVisitor::DescribeLocation() const { 248 std::string result("Visiting method '"); 249 mirror::ArtMethod* m = GetMethod(); 250 if (m == NULL) { 251 return "upcall"; 252 } 253 result += PrettyMethod(m); 254 result += StringPrintf("' at dex PC 0x%04x", GetDexPc()); 255 if (!IsShadowFrame()) { 256 result += StringPrintf(" (native PC %p)", reinterpret_cast<void*>(GetCurrentQuickFramePc())); 257 } 258 return result; 259} 260 261instrumentation::InstrumentationStackFrame& StackVisitor::GetInstrumentationStackFrame(uint32_t depth) const { 262 CHECK_LT(depth, thread_->GetInstrumentationStack()->size()); 263 return thread_->GetInstrumentationStack()->at(depth); 264} 265 266void StackVisitor::SanityCheckFrame() const { 267 if (kIsDebugBuild) { 268 mirror::ArtMethod* method = GetMethod(); 269 CHECK(method->GetClass() == mirror::ArtMethod::GetJavaLangReflectArtMethod()); 270 if (cur_quick_frame_ != nullptr) { 271 method->AssertPcIsWithinQuickCode(cur_quick_frame_pc_); 272 // Frame sanity. 273 size_t frame_size = method->GetFrameSizeInBytes(); 274 CHECK_NE(frame_size, 0u); 275 // A rough guess at an upper size we expect to see for a frame. The 256 is 276 // a dex register limit. The 16 incorporates callee save spills and 277 // outgoing argument set up. 
278 const size_t kMaxExpectedFrameSize = 256 * sizeof(word) + 16; 279 CHECK_LE(frame_size, kMaxExpectedFrameSize); 280 size_t return_pc_offset = method->GetReturnPcOffsetInBytes(); 281 CHECK_LT(return_pc_offset, frame_size); 282 } 283 } 284} 285 286void StackVisitor::WalkStack(bool include_transitions) { 287 DCHECK(thread_ == Thread::Current() || thread_->IsSuspended()); 288 CHECK_EQ(cur_depth_, 0U); 289 bool exit_stubs_installed = Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled(); 290 uint32_t instrumentation_stack_depth = 0; 291 for (const ManagedStack* current_fragment = thread_->GetManagedStack(); current_fragment != NULL; 292 current_fragment = current_fragment->GetLink()) { 293 cur_shadow_frame_ = current_fragment->GetTopShadowFrame(); 294 cur_quick_frame_ = current_fragment->GetTopQuickFrame(); 295 cur_quick_frame_pc_ = current_fragment->GetTopQuickFramePc(); 296 if (cur_quick_frame_ != NULL) { // Handle quick stack frames. 297 // Can't be both a shadow and a quick fragment. 298 DCHECK(current_fragment->GetTopShadowFrame() == NULL); 299 mirror::ArtMethod* method = *cur_quick_frame_; 300 while (method != NULL) { 301 SanityCheckFrame(); 302 bool should_continue = VisitFrame(); 303 if (UNLIKELY(!should_continue)) { 304 return; 305 } 306 if (context_ != NULL) { 307 context_->FillCalleeSaves(*this); 308 } 309 size_t frame_size = method->GetFrameSizeInBytes(); 310 // Compute PC for next stack frame from return PC. 311 size_t return_pc_offset = method->GetReturnPcOffsetInBytes(); 312 byte* return_pc_addr = reinterpret_cast<byte*>(cur_quick_frame_) + return_pc_offset; 313 uintptr_t return_pc = *reinterpret_cast<uintptr_t*>(return_pc_addr); 314 if (UNLIKELY(exit_stubs_installed)) { 315 // While profiling, the return pc is restored from the side stack, except when walking 316 // the stack for an exception where the side stack will be unwound in VisitFrame. 
317 if (GetQuickInstrumentationExitPc() == return_pc) { 318 const instrumentation::InstrumentationStackFrame& instrumentation_frame = 319 GetInstrumentationStackFrame(instrumentation_stack_depth); 320 instrumentation_stack_depth++; 321 if (GetMethod() == Runtime::Current()->GetCalleeSaveMethod(Runtime::kSaveAll)) { 322 // Skip runtime save all callee frames which are used to deliver exceptions. 323 } else if (instrumentation_frame.interpreter_entry_) { 324 mirror::ArtMethod* callee = Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs); 325 CHECK_EQ(GetMethod(), callee) << "Expected: " << PrettyMethod(callee) << " Found: " 326 << PrettyMethod(GetMethod()); 327 } else if (instrumentation_frame.method_ != GetMethod()) { 328 LOG(FATAL) << "Expected: " << PrettyMethod(instrumentation_frame.method_) 329 << " Found: " << PrettyMethod(GetMethod()); 330 } 331 if (num_frames_ != 0) { 332 // Check agreement of frame Ids only if num_frames_ is computed to avoid infinite 333 // recursion. 
334 CHECK(instrumentation_frame.frame_id_ == GetFrameId()) 335 << "Expected: " << instrumentation_frame.frame_id_ 336 << " Found: " << GetFrameId(); 337 } 338 return_pc = instrumentation_frame.return_pc_; 339 } 340 } 341 cur_quick_frame_pc_ = return_pc; 342 byte* next_frame = reinterpret_cast<byte*>(cur_quick_frame_) + frame_size; 343 cur_quick_frame_ = reinterpret_cast<mirror::ArtMethod**>(next_frame); 344 cur_depth_++; 345 method = *cur_quick_frame_; 346 } 347 } else if (cur_shadow_frame_ != NULL) { 348 do { 349 SanityCheckFrame(); 350 bool should_continue = VisitFrame(); 351 if (UNLIKELY(!should_continue)) { 352 return; 353 } 354 cur_depth_++; 355 cur_shadow_frame_ = cur_shadow_frame_->GetLink(); 356 } while (cur_shadow_frame_ != NULL); 357 } 358 if (include_transitions) { 359 bool should_continue = VisitFrame(); 360 if (!should_continue) { 361 return; 362 } 363 } 364 cur_depth_++; 365 } 366 if (num_frames_ != 0) { 367 CHECK_EQ(cur_depth_, num_frames_); 368 } 369} 370 371} // namespace art 372