/art/runtime/entrypoints/jni/

jni_entrypoints.cc:
    29  Thread* self = Thread::Current();
    31  extern "C" void* artFindNativeMethod(Thread* self) {
    32  DCHECK_EQ(self, Thread::Current());
    34  Locks::mutator_lock_->AssertNotHeld(self);  // We come here as Native.
    35  ScopedObjectAccess soa(self);
    37  ArtMethod* method = self->GetCurrentMethod(nullptr);
    44  DCHECK(self->IsExceptionPending());

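The DCHECK at line 44 documents a convention worth spelling out: when the native-method lookup fails, the runtime raises an exception on the calling thread and returns nullptr, so the two always travel together. A minimal sketch of that convention, with hypothetical FakeThread and FindNativeMethod stand-ins rather than ART's real types:

    #include <cassert>

    // Hypothetical stand-in for ART's Thread; the real class is far richer.
    struct FakeThread {
      bool exception_pending = false;
    };

    // On failure, mark an exception pending and return nullptr, so callers
    // can rely on "nullptr result implies exception pending".
    void* FindNativeMethod(FakeThread* self, const char* symbol) {
      (void)symbol;
      void* code = nullptr;  // a dlsym()-style lookup would go here
      if (code == nullptr) {
        self->exception_pending = true;  // ART throws UnsatisfiedLinkError here
      }
      return code;
    }

    int main() {
      FakeThread self;
      void* code = FindNativeMethod(&self, "Java_Foo_bar");
      assert(code != nullptr || self.exception_pending);  // the DCHECK'd invariant
      return 0;
    }
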
/art/runtime/interpreter/mterp/

mterp_stub.cc:
    32  void InitMterpTls(Thread* self) {
    33  self->SetMterpDefaultIBase(nullptr);
    34  self->SetMterpCurrentIBase(nullptr);
    35  self->SetMterpAltIBase(nullptr);
    41  extern "C" bool ExecuteMterpImpl(Thread* self, const DexFile::CodeItem* code_item,
    44  UNUSED(self); UNUSED(shadow_frame); UNUSED(code_item); UNUSED(result_register);

/art/runtime/lambda/

leaking_allocator.cc:
    25  void* LeakingAllocator::AllocateMemoryImpl(Thread* self, size_t byte_size, size_t align_size) {
    27  void* mem = Runtime::Current()->GetLinearAlloc()->Alloc(self, byte_size);

leaking_allocator.h:
    48  static AlignedMemoryStorage<T>* AllocateMemory(Thread* self, size_t byte_size = sizeof(T)) {
    50  AllocateMemoryImpl(self, byte_size, alignof(T)));
    55  static T* MakeFlexibleInstance(Thread* self, size_t byte_size, Args&&... args) {
    56  return new (AllocateMemory<T>(self, byte_size)) T(std::forward<Args>(args)...);
    61  static T* MakeInstance(Thread* self, Args&&... args) {
    62  return new (AllocateMemory<T>(self, sizeof(T))) T(std::forward<Args>(args)...);
    66  static void* AllocateMemoryImpl(Thread* self, size_t byte_size, size_t align_size);

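MakeInstance and MakeFlexibleInstance pair placement new with perfect forwarding over memory that is deliberately never freed. A minimal sketch of the same idiom, assuming std::malloc as a stand-in for ART's LinearAlloc; the LeakingArena name is illustrative only:

    #include <cstddef>
    #include <cstdlib>
    #include <new>
    #include <utility>

    // Sketch only: null-check and over-alignment handling are elided, and
    // the "leak" is intentional -- objects live for the rest of the process.
    class LeakingArena {
     public:
      template <typename T, typename... Args>
      static T* MakeInstance(Args&&... args) {
        void* mem = std::malloc(sizeof(T));               // never freed, by design
        return new (mem) T(std::forward<Args>(args)...);  // construct in place
      }

      // Like MakeFlexibleInstance above: byte_size may exceed sizeof(T) to
      // leave room for a trailing variable-length payload.
      template <typename T, typename... Args>
      static T* MakeFlexibleInstance(std::size_t byte_size, Args&&... args) {
        void* mem = std::malloc(byte_size);
        return new (mem) T(std::forward<Args>(args)...);
      }
    };
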
/art/compiler/dex/quick/

dex_file_to_method_inliner_map.cc:
    41  Thread* self = Thread::Current();
    43  ReaderMutexLock mu(self, lock_);
    58  WriterMutexLock mu(self, lock_);
    66  locked_inliner->lock_.ExclusiveLock(self);  // Acquire inliner's lock_ before releasing lock_.
    69  locked_inliner->lock_.ExclusiveUnlock(self);

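The snippet shows a classic read-mostly pattern: take the reader lock for the fast path, retry under the writer lock to insert, and (lines 66/69) hand off to the entry's own lock before releasing the map's, so the entry cannot be torn down in between. A sketch of the first two steps using std::shared_mutex in place of ART's ReaderWriterMutex (which additionally takes the Thread* for lock-level checking); InlinerMap and Lookup are illustrative names:

    #include <map>
    #include <mutex>
    #include <shared_mutex>
    #include <string>

    // Illustrative cache, not ART's DexFileToMethodInlinerMap.
    class InlinerMap {
     public:
      int* Lookup(const std::string& key) {
        {
          std::shared_lock<std::shared_mutex> reader(lock_);  // fast path: shared lock
          auto it = map_.find(key);
          if (it != map_.end()) {
            return &it->second;
          }
        }
        // Slow path: re-check under the writer lock, since another thread
        // may have inserted between the two lock acquisitions.
        std::unique_lock<std::shared_mutex> writer(lock_);
        return &map_[key];  // default-constructs the entry if still absent
      }

     private:
      std::shared_mutex lock_;
      std::map<std::string, int> map_;
    };

std::map is used here because its node-based storage keeps returned pointers stable across later insertions.
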
/art/runtime/gc/

reference_queue_test.cc:
    29  Thread* self = Thread::Current();
    30  ScopedObjectAccess soa(self);
    31  StackHandleScope<20> hs(self);
    37  Runtime::Current()->GetClassLinker()->FindClass(self, "Ljava/lang/ref/WeakReference;",
    40  auto ref1(hs.NewHandle(ref_class->AllocObject(self)->AsReference()));
    42  auto ref2(hs.NewHandle(ref_class->AllocObject(self)->AsReference()));
    63  Thread* self = Thread::Current();
    64  ScopedObjectAccess soa(self);
    65  StackHandleScope<20> hs(self);
    70  Runtime::Current()->GetClassLinker()->FindClass(self, "Ljav [all...]

scoped_gc_critical_section.cc:
    27  ScopedGCCriticalSection::ScopedGCCriticalSection(Thread* self,
    30  : self_(self) {
    31  Runtime::Current()->GetHeap()->StartGC(self, cause, collector_type);
    32  old_cause_ = self->StartAssertNoThreadSuspension("ScopedGCCriticalSection");

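ScopedGCCriticalSection does all of its work in the constructor and, per the usual RAII contract, undoes it in the destructor; only the enter side appears in the snippet. A generic sketch of the shape, with a hypothetical CriticalSectionHost standing in for the Heap/Thread pair ART splits these duties across:

    // Hypothetical host object; ART divides this between Heap and Thread.
    struct CriticalSectionHost {
      void Enter() { /* block GC transitions, forbid thread suspension */ }
      void Exit()  { /* restore the previous state */ }
    };

    // Enter in the constructor, exit in the destructor: the critical section
    // cannot be left open on any path out of the scope, including exceptions.
    class ScopedCriticalSection {
     public:
      explicit ScopedCriticalSection(CriticalSectionHost* host) : host_(host) {
        host_->Enter();
      }
      ~ScopedCriticalSection() { host_->Exit(); }

      // Non-copyable, like other scoped guards.
      ScopedCriticalSection(const ScopedCriticalSection&) = delete;
      ScopedCriticalSection& operator=(const ScopedCriticalSection&) = delete;

     private:
      CriticalSectionHost* const host_;
    };
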
/art/test/136-daemon-jni-shutdown/

daemon_jni_shutdown.cc:
    44  Thread* const self = Thread::Current();
    45  self->SetTopOfStack(nullptr);
    46  self->SetTopOfShadowStack(nullptr);

/art/runtime/

art_field.cc:
    59  mirror::String* ArtField::ResolveGetStringName(Thread* self, const DexFile& dex_file,
    61  StackHandleScope<1> hs(self);

linear_alloc.cc:
    26  void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) {
    27  MutexLock mu(self, lock_);
    31  void* LinearAlloc::Alloc(Thread* self, size_t size) {
    32  MutexLock mu(self, lock_);

monitor_pool_test.cc:
    39  static void VerifyMonitor(Monitor* mon, Thread* self) {
    43  EXPECT_EQ(MonitorPool::ComputeMonitorId(mon, self), mon->GetMonitorId());
    58  Thread* self = Thread::Current();
    59  ScopedObjectAccess soa(self);
    74  Monitor* mon = MonitorPool::CreateMonitor(self, self, nullptr, static_cast<int32_t>(i));
    77  VerifyMonitor(mon, self);
    85  VerifyMonitor(mon, self);
    87  MonitorPool::ReleaseMonitor(self, mon);
    97  Monitor* mon = MonitorPool::CreateMonitor(self, sel [all...]

object_lock.cc:
    25  ObjectLock<T>::ObjectLock(Thread* self, Handle<T> object) : self_(self), obj_(object) {
    51  ObjectTryLock<T>::ObjectTryLock(Thread* self, Handle<T> object) : self_(self), obj_(object) {

barrier.cc:
    32  void Barrier::Pass(Thread* self) {
    33  MutexLock mu(self, lock_);
    34  SetCountLocked(self, count_ - 1);
    37  void Barrier::Wait(Thread* self) {
    38  Increment(self, -1);
    41  void Barrier::Init(Thread* self, int count) {
    42  MutexLock mu(self, lock_);
    43  SetCountLocked(self, count);
    46  void Barrier::Increment(Thread* self, int delta) {
    47  MutexLock mu(self, lock_);
    62  Increment(Thread* self, int delta, uint32_t timeout_ms)
    83  SetCountLocked(Thread* self, int count)
    [all...]

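Pass() decrements the count under the lock, Wait() is a decrement followed by blocking until the count reaches zero, and Increment() is the shared implementation (with a timeout overload declared at line 62). A condition-variable sketch of the same counting-barrier shape, with std::mutex in place of ART's Mutex (which also takes the Thread* for lock-level checking); CountingBarrier is an illustrative analogue, not the ART class:

    #include <condition_variable>
    #include <mutex>

    class CountingBarrier {
     public:
      explicit CountingBarrier(int count) : count_(count) {}

      // A participant reports completion; the last one wakes the waiters.
      void Pass() {
        std::lock_guard<std::mutex> lock(lock_);
        SetCountLocked(count_ - 1);
      }

      // Decrement, then block until the count reaches zero, mirroring
      // Barrier::Wait() -> Increment(self, -1) above.
      void Wait() {
        std::unique_lock<std::mutex> lock(lock_);
        SetCountLocked(count_ - 1);
        cv_.wait(lock, [this] { return count_ == 0; });
      }

     private:
      void SetCountLocked(int count) {  // caller holds lock_
        count_ = count;
        if (count_ == 0) {
          cv_.notify_all();
        }
      }

      std::mutex lock_;
      std::condition_variable cv_;
      int count_;
    };
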
jni_env_ext.h:
    37  static JNIEnvExt* Create(Thread* self, JavaVMExt* vm);
    60  Thread* const self;  // member of struct art::JNIEnvExt
    107 JNIEnvExt(Thread* self, JavaVMExt* vm);

/art/runtime/mirror/

field-inl.h:
    31  inline mirror::Field* Field::CreateFromArtField(Thread* self, ArtField* field,
    33  StackHandleScope<2> hs(self);
    40  self->AssertPendingException();
    45  mirror::Throwable* exception = self->GetException();
    49  self->ClearException();
    52  auto ret = hs.NewHandle(static_cast<Field*>(StaticClass()->AllocObject(self)));
    54  self->AssertPendingOOMException();

method.cc:
    55  Method* Method::CreateFromArtMethod(Thread* self, ArtMethod* method) {
    57  auto* ret = down_cast<Method*>(StaticClass()->AllocObject(self));
    64  template Method* Method::CreateFromArtMethod<false>(Thread* self, ArtMethod* method);
    65  template Method* Method::CreateFromArtMethod<true>(Thread* self, ArtMethod* method);
    100 Constructor* Constructor::CreateFromArtMethod(Thread* self, ArtMethod* method) {
    102 auto* ret = down_cast<Constructor*>(StaticClass()->AllocObject(self));
    109 template Constructor* Constructor::CreateFromArtMethod<false>(Thread* self, ArtMethod* method);
    110 template Constructor* Constructor::CreateFromArtMethod<true>(Thread* self, ArtMethod* method);

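Lines 64-65 and 109-110 are explicit template instantiations: the templated CreateFromArtMethod bodies live in the .cc file, so that file must name every <bool> variant other translation units link against. A minimal sketch of the idiom under illustrative names (CreateCounter is hypothetical):

    // In the header: declaration only, so callers can reference it.
    template <bool kTransactionActive>
    int CreateCounter();

    // In the .cc file: the definition...
    template <bool kTransactionActive>
    int CreateCounter() {
      return kTransactionActive ? 1 : 0;
    }

    // ...followed by explicit instantiations of every variant used elsewhere,
    // so the linker can resolve them despite the out-of-header definition.
    template int CreateCounter<false>();
    template int CreateCounter<true>();
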
stack_trace_element.cc:
    42  StackTraceElement* StackTraceElement::Alloc(Thread* self, Handle<String> declaring_class,
    46  down_cast<StackTraceElement*>(GetStackTraceElement()->AllocObject(self));

/art/runtime/verifier/

method_verifier_test.cc:
    35  Thread* self = Thread::Current();
    36  mirror::Class* klass = class_linker_->FindSystemClass(self, descriptor.c_str());
    40  MethodVerifier::FailureKind failure = MethodVerifier::VerifyClass(self,

/art/compiler/dex/

verification_results.cc:
    40  Thread* self = Thread::Current();
    42  WriterMutexLock mu(self, verified_methods_lock_);

/art/compiler/utils/

dedupe_set_test.cc:
    55  Thread* self = Thread::Current();
    66  array1 = deduplicator.Add(self, test1);
    75  array2 = deduplicator.Add(self, test2);
    84  array3 = deduplicator.Add(self, test3);

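The test drives a deduplicating set: Add() stores the first copy of each distinct payload and returns a pointer to the stored, canonical copy, so equal inputs yield pointer-equal results. A sketch under that assumption, using std::set for storage; ART's DedupeSet additionally shards by hash and locks per shard, which is omitted here:

    #include <cstdint>
    #include <set>
    #include <vector>

    class Deduplicator {
     public:
      const std::vector<uint8_t>* Add(const std::vector<uint8_t>& data) {
        // insert() is a no-op if an equal element already exists; either way
        // the iterator points at the canonical stored copy.
        auto result = storage_.insert(data);
        return &*result.first;
      }

     private:
      std::set<std::vector<uint8_t>> storage_;
    };

Two Add() calls with equal byte contents return the same pointer, which is the property the EXPECT checks in the test compare.
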
/art/runtime/arch/arm/

quick_entrypoints_cc_arm.cc:
    24  Thread* self, JValue* result, uint32_t, uint32_t*,
    28  quick_invoke_reg_setup(ArtMethod* method, uint32_t* args, uint32_t args_size, Thread* self, JValue* result, const char* shorty)
    29  Thread* self, JValue* result, const char* shorty) {
    98  art_quick_invoke_stub_internal(method, args, args_size, self, result, result_in_float,
    104 art_quick_invoke_stub(ArtMethod* method, uint32_t* args, uint32_t args_size, Thread* self, JValue* result, const char* shorty)
    105 Thread* self, JValue* result, const char* shorty) {
    106 quick_invoke_reg_setup<false>(method, args, args_size, self, result, shorty);
    111 art_quick_invoke_static_stub(ArtMethod* method, uint32_t* args, uint32_t args_size, Thread* self, JValue* result, const char* shorty)
    112 uint32_t args_size, Thread* self, JValue* result,
    114 quick_invoke_reg_setup<true>(method, args, args_size, self, result, shorty);

/art/runtime/gc/space/

bump_pointer_space-inl.h:
    42  inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
    46  Locks::mutator_lock_->AssertExclusiveHeld(self);

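AllocThreadUnsafe can skip per-allocation synchronization because the assert at line 46 requires the caller to hold the mutator lock exclusively (e.g., during a stop-the-world pause); allocation is then a bare pointer bump. A sketch of that fast path with illustrative 8-byte alignment; BumpArena is a stand-in, and BumpPointerSpace tracks more state (object and byte counts, main-block size) than shown:

    #include <cstddef>
    #include <cstdint>

    class BumpArena {
     public:
      BumpArena(uint8_t* begin, size_t capacity)
          : pos_(begin), end_(begin + capacity) {}

      // Caller must guarantee exclusive access -- hence "ThreadUnsafe".
      void* AllocThreadUnsafe(size_t num_bytes) {
        size_t aligned = (num_bytes + 7u) & ~static_cast<size_t>(7u);  // round up to 8
        if (end_ - pos_ < static_cast<ptrdiff_t>(aligned)) {
          return nullptr;  // out of space
        }
        void* result = pos_;
        pos_ += aligned;   // the "bump": no locks, no atomics
        return result;
      }

     private:
      uint8_t* pos_;
      uint8_t* end_;
    };
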
dlmalloc_space-inl.h:
    28  inline mirror::Object* DlMallocSpace::AllocNonvirtual(Thread* self, size_t num_bytes,
    34  MutexLock mu(self, lock_);
    35  obj = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,
    55  Thread* /*self*/, size_t num_bytes,

rosalloc_space.h:
    49  mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    52  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    54  return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,
    57  mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    60  return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size,
    66  size_t Free(Thread* self, mirror::Object* ptr) OVERRIDE
    68  size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) OVERRIDE
    71  mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    74  return AllocCommon(self, num_bytes, bytes_allocated, usable_size,
    77  mirror::Object* AllocNonvirtualThreadUnsafe(Thread* self, size_ [all...]

/art/runtime/jit/

profiling_info.cc:
    50  bool ProfilingInfo::Create(Thread* self, ArtMethod* method, bool retry_allocation) {
    85  return code_cache->AddProfilingInfo(self, method, entries, retry_allocation) != nullptr;
