Searched refs:self (Results 51 - 75 of 363) sorted by relevance


/art/runtime/interpreter/
unstarted_runtime.cc:56  static void AbortTransactionOrFail(Thread* self, const char* fmt, ...)
60 static void AbortTransactionOrFail(Thread* self, const char* fmt, ...) { argument
64 AbortTransactionV(self, fmt, args);
78 static void CharacterLowerUpper(Thread* self,
87 AbortTransactionOrFail(self,
104 Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
105 CharacterLowerUpper(self, shadow_frame, result, arg_offset, true);
109 Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
110 CharacterLowerUpper(self, shadow_frame, result, arg_offset, false);
114 static void UnstartedRuntimeFindClass(Thread* self, Handl
103 UnstartedCharacterToLowerCase( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
108 UnstartedCharacterToUpperCase( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
169 UnstartedClassForName( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
187 UnstartedClassForNameLong( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
204 UnstartedClassClassForName( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
221 UnstartedClassNewInstance( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
276 UnstartedClassGetDeclaredField( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
311 UnstartedClassGetDeclaredMethod( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
330 UnstartedClassGetDeclaredConstructor( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
346 UnstartedClassGetEnclosingClass( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
356 UnstartedClassGetInnerClassFlags( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
499 UnstartedClassLoaderGetResourceAsStream( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
521 UnstartedVmClassLoaderFindLoadedClass( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
576 UnstartedSystemArraycopy( Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) argument
668 UnstartedSystemArraycopyByte( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
674 UnstartedSystemArraycopyChar( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
680 UnstartedSystemArraycopyInt( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
777 UnstartedSystemGetProperty( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
782 UnstartedSystemGetPropertyWithDefault( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
787 UnstartedThreadLocalGet( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) argument
893 UnstartedDexCacheGetDexNative( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1024 UnstartedMemoryPeekByteArray( Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) argument
1030 UnstartedStringGetCharsNoCheck( Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) argument
1051 UnstartedStringCharAt( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1063 UnstartedStringSetCharAt( Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) argument
1076 UnstartedStringFactoryNewStringFromChars( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1090 UnstartedStringFactoryNewStringFromString( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1105 UnstartedStringFastSubstring( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1134 UnstartedReferenceGetReferent( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1152 UnstartedRuntimeAvailableProcessors( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) argument
1172 UnstartedUnsafeCompareAndSwapLong( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1210 UnstartedUnsafeCompareAndSwapObject( Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1417 UnstartedJNIVMRuntimeNewUnpaddedArray( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) argument
1437 UnstartedJNIVMStackGetStackClass2( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) argument
1474 UnstartedJNIClassGetNameNative( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) argument
1500 UnstartedJNIObjectInternalClone( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) argument
1506 UnstartedJNIObjectNotifyAll( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, uint32_t* args ATTRIBUTE_UNUSED, JValue* result ATTRIBUTE_UNUSED) argument
1512 UnstartedJNIStringCompareTo( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, uint32_t* args, JValue* result) argument
1534 UnstartedJNIArrayCreateMultiArray( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) argument
1543 UnstartedJNIArrayCreateObjectArray( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) argument
1565 UnstartedJNIThrowableNativeFillInStackTrace( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) argument
1607 UnstartedJNIUnsafeGetIntVolatile( Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) argument
1688 Invoke(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) argument
1713 Jni(Thread* self, ArtMethod* method, mirror::Object* receiver, uint32_t* args, JValue* result) argument
[all...]
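Note: every UnstartedXxx hit above shares one handler signature — (Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) — with AbortTransactionOrFail(self, fmt, ...) as the bail-out when a call cannot be handled without a started runtime. The standalone sketch below models only that shape; Thread, ShadowFrame, JValue, the handler body and the name-keyed dispatch table are toy stand-ins and assumptions, not ART's definitions.

    #include <cstdarg>
    #include <cstddef>
    #include <cstdio>
    #include <map>
    #include <string>

    // Toy stand-ins for the ART types named above; not the real definitions.
    struct Thread {};
    struct JValue { long l = 0; void SetL(long v) { l = v; } };
    struct ShadowFrame {
      long regs[8] = {0};
      long GetVReg(size_t i) const { return regs[i]; }
    };

    // Shape of AbortTransactionOrFail(Thread*, const char* fmt, ...): give up on
    // emulating the call instead of computing a possibly wrong result.
    static void AbortTransactionOrFail(Thread* /*self*/, const char* fmt, ...) {
      va_list args;
      va_start(args, fmt);
      std::vfprintf(stderr, fmt, args);
      va_end(args);
      std::fputc('\n', stderr);
    }

    // Every UnstartedXxx handler in the hits has exactly this signature.
    using Handler = void (*)(Thread*, ShadowFrame*, JValue*, size_t);

    // Hypothetical handler, loosely modeled on the CharacterLowerUpper hits:
    // handle the easy ASCII case, abort on anything else.
    static void ToLowerCaseHandler(Thread* self, ShadowFrame* frame,
                                   JValue* result, size_t arg_offset) {
      long c = frame->GetVReg(arg_offset);
      if (c < 'A' || c > 'Z') {
        AbortTransactionOrFail(self, "Unsupported character value %ld", c);
        return;
      }
      result->SetL(c - 'A' + 'a');
    }

    int main() {
      // Assumed dispatch style: a table keyed by the qualified method name.
      std::map<std::string, Handler> handlers = {
          {"java.lang.Character.toLowerCase", ToLowerCaseHandler},
      };
      Thread self;
      ShadowFrame frame;
      frame.regs[0] = 'Q';
      JValue result;
      handlers["java.lang.Character.toLowerCase"](&self, &frame, &result, 0);
      std::printf("-> %c\n", static_cast<char>(result.l));  // prints "-> q"
    }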
interpreter.cc:35  static void InterpreterJni(Thread* self, ArtMethod* method, const StringPiece& shorty,
40 ScopedObjectAccessUnchecked soa(self);
49 ScopedThreadStateChange tsc(self, kNative);
58 ScopedThreadStateChange tsc(self, kNative);
65 ScopedThreadStateChange tsc(self, kNative);
72 ScopedThreadStateChange tsc(self, kNative);
79 ScopedThreadStateChange tsc(self, kNative);
91 ScopedThreadStateChange tsc(self, kNative);
100 ScopedThreadStateChange tsc(self, kNative);
111 ScopedThreadStateChange tsc(self, kNative);
391 EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver, uint32_t* args, JValue* result, bool stay_in_interpreter) argument
603 EnterInterpreterFromEntryPoint(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame) argument
619 ArtInterpreterToInterpreterBridge(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result) argument
664 InitInterpreterTls(Thread* self) argument
[all...]
/art/runtime/entrypoints/quick/
quick_alloc_entrypoints.cc:32  uint32_t type_idx, ArtMethod* method, Thread* self) \
34 ScopedQuickEntrypointChecks sqec(self); \
41 if (LIKELY(byte_count < self->TlabSize())) { \
42 obj = self->AllocTlab(byte_count); \
56 return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
59 mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \
61 ScopedQuickEntrypointChecks sqec(self); \
67 if (LIKELY(byte_count < self->TlabSize())) { \
68 obj = self->AllocTlab(byte_count); \
82 return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \
[all...]
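Note: the macro bodies above repeat one fast path — if the request fits in the thread-local allocation buffer (byte_count < self->TlabSize()), bump-allocate with self->AllocTlab(byte_count), otherwise fall through to AllocObjectFromCode / AllocObjectFromCodeResolved. Below is a self-contained toy model of that bump-pointer fast path; the Thread class, the 1 KiB buffer and the plain-new slow path are illustrative stand-ins, not ART's.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Toy thread owning a thread-local allocation buffer (TLAB): a bump pointer
    // over a fixed buffer, mirroring the TlabSize()/AllocTlab() pair in the hits.
    class Thread {
     public:
      explicit Thread(size_t tlab_bytes) : buffer_(tlab_bytes), pos_(0) {}
      size_t TlabSize() const { return buffer_.size() - pos_; }  // bytes still free
      void* AllocTlab(size_t byte_count) {                       // bump allocation
        void* result = buffer_.data() + pos_;
        pos_ += byte_count;
        return result;
      }
     private:
      std::vector<uint8_t> buffer_;
      size_t pos_;
    };

    // Slow-path stand-in for AllocObjectFromCode(): here just heap allocation.
    static void* AllocObjectSlowPath(size_t byte_count) {
      return ::operator new(byte_count);
    }

    static void* AllocObject(Thread* self, size_t byte_count) {
      if (byte_count < self->TlabSize()) {     // fast path: fits in the TLAB
        return self->AllocTlab(byte_count);
      }
      return AllocObjectSlowPath(byte_count);  // fall back to the shared allocator
    }

    int main() {
      Thread self(/*tlab_bytes=*/1024);
      void* a = AllocObject(&self, 64);    // served by the TLAB bump pointer
      void* b = AllocObject(&self, 4096);  // too big, takes the slow path
      std::printf("tlab=%p slow=%p free=%zu\n", a, b, self.TlabSize());
      ::operator delete(b);
    }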
quick_fillarray_entrypoints.cc:28  ArtMethod* method, Thread* self)
30 ScopedQuickEntrypointChecks sqec(self);
quick_entrypoints.h:54  extern uint32_t JniMethodStart(Thread* self) NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
55 extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self)
57 extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self)
60 Thread* self)
63 Thread* self)
68 jobject locked, Thread* self)
72 Thread* self)
quick_field_entrypoints.cc:43  Thread* self,
48 StackHandleScope<1> hs(self);
50 ArtField* field = FindFieldFromCode<type, kAccessCheck>(field_idx, referrer, self, size);
60 Thread* self)
62 ScopedQuickEntrypointChecks sqec(self);
67 field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int8_t));
76 Thread* self)
78 ScopedQuickEntrypointChecks sqec(self);
83 field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int8_t));
92 Thread* self)
[all...]
/art/runtime/interpreter/mterp/
mterp.h:31  void InitMterpTls(Thread* self);
mterp.cc:46  void InitMterpTls(Thread* self) { argument
47 self->SetMterpDefaultIBase(artMterpAsmInstructionStart);
48 self->SetMterpAltIBase(artMterpAsmAltInstructionStart);
49 self->SetMterpCurrentIBase(TraceExecutionEnabled() ?
153 extern "C" bool MterpInvokeVirtual(Thread* self, ShadowFrame* shadow_frame,
159 self, *shadow_frame, inst, inst_data, result_register);
162 extern "C" bool MterpInvokeSuper(Thread* self, ShadowFrame* shadow_frame,
168 self, *shadow_frame, inst, inst_data, result_register);
171 extern "C" bool MterpInvokeInterface(Thread* self, ShadowFrame* shadow_frame,
177 self, *shadow_frame, inst, inst_data, result_register);
[all...]
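Note: the mterp.cc hits show InitMterpTls stashing three handler-table pointers in thread-local storage and choosing the current one from TraceExecutionEnabled(). A standalone sketch of that selection only, assuming the ternary picks the alternate table when tracing is on (the true branch is cut off in the hit); the Thread fields, the two string "tables" and the always-false trace flag are stand-ins.

    #include <cstdio>

    // Stand-ins for the two handler tables named in the hits
    // (artMterpAsmInstructionStart / artMterpAsmAltInstructionStart).
    static const char kDefaultHandlers[] = "default handler table";
    static const char kAltHandlers[] = "alt (tracing) handler table";

    // Toy Thread exposing only the three mterp TLS slots set above.
    struct Thread {
      const void* mterp_default_ibase = nullptr;
      const void* mterp_alt_ibase = nullptr;
      const void* mterp_current_ibase = nullptr;
      void SetMterpDefaultIBase(const void* p) { mterp_default_ibase = p; }
      void SetMterpAltIBase(const void* p) { mterp_alt_ibase = p; }
      void SetMterpCurrentIBase(const void* p) { mterp_current_ibase = p; }
    };

    static bool TraceExecutionEnabled() { return false; }  // stand-in flag

    // Same selection shape as the InitMterpTls hit: alternate table when
    // tracing is enabled, otherwise the default table.
    static void InitMterpTls(Thread* self) {
      self->SetMterpDefaultIBase(kDefaultHandlers);
      self->SetMterpAltIBase(kAltHandlers);
      self->SetMterpCurrentIBase(
          TraceExecutionEnabled() ? kAltHandlers : kDefaultHandlers);
    }

    int main() {
      Thread self;
      InitMterpTls(&self);
      std::printf("current ibase: %s\n",
                  static_cast<const char*>(self.mterp_current_ibase));
    }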
/art/runtime/
linear_alloc.cc:26  void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) { argument
27 MutexLock mu(self, lock_);
31 void* LinearAlloc::Alloc(Thread* self, size_t size) { argument
32 MutexLock mu(self, lock_);
linear_alloc.h:31  void* Alloc(Thread* self, size_t size) REQUIRES(!lock_);
34 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
38 T* AllocArray(Thread* self, size_t elements) REQUIRES(!lock_) {
39 return reinterpret_cast<T*>(Alloc(self, elements * sizeof(T)));
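Note: linear_alloc.h/.cc above show the whole pattern — every entry point takes the calling Thread* so it can take lock_ (MutexLock mu(self, lock_)), and AllocArray<T> is just Alloc(self, elements * sizeof(T)). Below is a standalone sketch with std::mutex standing in for ART's Mutex and a fixed vector as backing store; Realloc and alignment handling are omitted.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <mutex>
    #include <vector>

    struct Thread {};  // stand-in: ART passes Thread* self so its MutexLock can record the owner

    // Standalone model of the LinearAlloc surface in the hits: every entry point
    // takes Thread* self and serializes on a single lock.
    class LinearAlloc {
     public:
      explicit LinearAlloc(size_t capacity) : storage_(capacity), pos_(0) {}

      void* Alloc(Thread* /*self*/, size_t size) {
        std::lock_guard<std::mutex> mu(lock_);  // stands in for MutexLock mu(self, lock_)
        if (pos_ + size > storage_.size()) {
          return nullptr;  // toy limitation: fixed capacity
        }
        void* result = storage_.data() + pos_;
        pos_ += size;
        return result;
      }

      // Matches the one-line AllocArray<T> shown in linear_alloc.h.
      template <typename T>
      T* AllocArray(Thread* self, size_t elements) {
        return reinterpret_cast<T*>(Alloc(self, elements * sizeof(T)));
      }

     private:
      std::mutex lock_;
      std::vector<uint8_t> storage_;
      size_t pos_;
    };

    int main() {
      Thread self;
      LinearAlloc la(/*capacity=*/4096);
      int32_t* ints = la.AllocArray<int32_t>(&self, 8);
      ints[0] = 42;
      std::printf("ints=%p ints[0]=%d\n", static_cast<void*>(ints), ints[0]);
    }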
java_vm_ext.cc:58  SharedLibrary(JNIEnv* env, Thread* self, const std::string& path, void* handle, argument
67 jni_on_load_thread_id_(self->GetThreadId()),
73 Thread* self = Thread::Current(); local
74 if (self != nullptr) {
75 self->GetJniEnv()->DeleteWeakGlobalRef(class_loader_);
101 Thread* self = Thread::Current(); variable
104 MutexLock mu(self, jni_on_load_lock_);
106 if (jni_on_load_thread_id_ == self->GetThreadId()) {
109 LOG(INFO) << *self << " recursive attempt to load library " << "\"" << path_ << "\"";
113 VLOG(jni) << "[" << *self << " waitin
126 Thread* self = Thread::Current(); variable
373 Thread* self = Thread::Current(); local
448 Thread* self = Thread::Current(); local
521 AddGlobalRef(Thread* self, mirror::Object* obj) argument
531 AddWeakGlobalRef(Thread* self, mirror::Object* obj) argument
543 DeleteGlobalRef(Thread* self, jobject obj) argument
554 DeleteWeakGlobalRef(Thread* self, jweak obj) argument
584 Thread* self = Thread::Current(); local
605 Thread* const self = Thread::Current(); local
616 Thread* self = Thread::Current(); local
624 Thread* self = Thread::Current(); local
633 UpdateGlobal(Thread* self, IndirectRef ref, mirror::Object* result) argument
649 DecodeWeakGlobal(Thread* self, IndirectRef ref) argument
664 DecodeWeakGlobalLocked(Thread* self, IndirectRef ref) argument
674 DecodeWeakGlobalDuringShutdown(Thread* self, IndirectRef ref) argument
687 IsWeakGlobalCleared(Thread* self, IndirectRef ref) argument
700 UpdateWeakGlobal(Thread* self, IndirectRef ref, mirror::Object* result) argument
706 Thread* self = Thread::Current(); local
733 Thread* self = Thread::Current(); local
896 Thread* self = Thread::Current(); local
931 Thread* self = Thread::Current(); local
[all...]
thread_pool.h:34  virtual void Run(Thread* self) = 0;
88 void StartWorkers(Thread* self) REQUIRES(!task_queue_lock_);
91 void StopWorkers(Thread* self) REQUIRES(!task_queue_lock_);
95 void AddTask(Thread* self, Task* task) REQUIRES(!task_queue_lock_);
98 void RemoveAllTasks(Thread* self) REQUIRES(!task_queue_lock_);
104 void Wait(Thread* self, bool do_work, bool may_hold_locks) REQUIRES(!task_queue_lock_);
106 size_t GetTaskCount(Thread* self) REQUIRES(!task_queue_lock_);
122 virtual Task* GetTask(Thread* self) REQUIRES(!task_queue_lock_);
125 Task* TryGetTask(Thread* self) REQUIRES(!task_queue_lock_);
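Note: thread_pool.h above gives the whole surface — a Task with virtual Run(Thread* self), and a pool with AddTask / StartWorkers / Wait / GetTask, all annotated REQUIRES(!task_queue_lock_). The sketch below is a deliberately small standalone model of that queue-plus-workers shape using std::thread; it is not ART's implementation (in particular, ART's Wait takes do_work / may_hold_locks flags, whereas Wait here simply drains and joins), and the worker-count argument to StartWorkers is an assumption.

    #include <condition_variable>
    #include <cstddef>
    #include <cstdio>
    #include <deque>
    #include <mutex>
    #include <thread>
    #include <vector>

    struct Thread {};  // stand-in for ART's Thread*; each worker gets its own

    // Mirrors the Task interface from the thread_pool.h hit: Run(Thread* self).
    class Task {
     public:
      virtual ~Task() {}
      virtual void Run(Thread* self) = 0;
    };

    // Minimal queue-plus-workers model of the surface shown above (AddTask,
    // StartWorkers, Wait, GetTask), all serialized on one task_queue_lock_.
    class ThreadPool {
     public:
      void AddTask(Thread* /*self*/, Task* task) {
        std::lock_guard<std::mutex> mu(task_queue_lock_);
        tasks_.push_back(task);
        task_cond_.notify_one();
      }

      void StartWorkers(Thread* /*self*/, size_t count) {
        for (size_t i = 0; i < count; ++i) {
          workers_.emplace_back([this] {
            Thread worker_self;
            while (Task* task = GetTask(&worker_self)) {
              task->Run(&worker_self);
              delete task;
            }
          });
        }
      }

      // Toy semantics: drain the queue, then stop and join the workers.
      void Wait(Thread* /*self*/) {
        {
          std::lock_guard<std::mutex> mu(task_queue_lock_);
          shutting_down_ = true;
        }
        task_cond_.notify_all();
        for (std::thread& t : workers_) {
          t.join();
        }
      }

     private:
      // Blocks until a task is available; returns nullptr once shut down and empty.
      Task* GetTask(Thread* /*self*/) {
        std::unique_lock<std::mutex> mu(task_queue_lock_);
        task_cond_.wait(mu, [this] { return !tasks_.empty() || shutting_down_; });
        if (tasks_.empty()) {
          return nullptr;
        }
        Task* task = tasks_.front();
        tasks_.pop_front();
        return task;
      }

      std::mutex task_queue_lock_;
      std::condition_variable task_cond_;
      std::deque<Task*> tasks_;
      std::vector<std::thread> workers_;
      bool shutting_down_ = false;
    };

    // Example task and driver.
    class PrintTask : public Task {
     public:
      explicit PrintTask(int id) : id_(id) {}
      void Run(Thread* /*self*/) override { std::printf("task %d ran\n", id_); }
     private:
      int id_;
    };

    int main() {
      Thread self;
      ThreadPool pool;
      for (int i = 0; i < 4; ++i) {
        pool.AddTask(&self, new PrintTask(i));
      }
      pool.StartWorkers(&self, /*count=*/2);
      pool.Wait(&self);
    }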
signal_catcher.cc:48  // On Android, /proc/self/cmdline will have been rewritten to something like "system_server".
51 if (ReadFileToString("/proc/self/cmdline", &current_cmd_line)) {
77 Thread* self = Thread::Current(); local
78 MutexLock mu(self, lock_);
80 cond_.Wait(self);
148 if (ReadFileToString("/proc/self/maps", &maps)) {
149 os << "/proc/self/maps:\n" << maps;
161 int SignalCatcher::WaitForSignal(Thread* self, SignalSet& signals) { argument
162 ScopedThreadStateChange tsc(self, kWaitingInMainSignalCatcherLoop);
172 LOG(INFO) << *self << "
189 Thread* self = Thread::Current(); local
[all...]
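Note: signal_catcher.cc reads /proc/self/cmdline for the process name (rewritten to something like "system_server" on Android) and dumps /proc/self/maps into the report. The helper below matches the ReadFileToString calling convention visible in the hits, but the body is an ordinary ifstream read, not ART's implementation; it only works on Linux/Android because it relies on procfs.

    #include <fstream>
    #include <iostream>
    #include <sstream>
    #include <string>

    // Same calling convention as the ReadFileToString used in the hits: returns
    // false if the file cannot be opened, fills *content otherwise.
    static bool ReadFileToString(const std::string& path, std::string* content) {
      std::ifstream in(path, std::ios::binary);
      if (!in) {
        return false;
      }
      std::ostringstream buffer;
      buffer << in.rdbuf();
      *content = buffer.str();
      return true;
    }

    int main() {
      std::string current_cmd_line;
      if (ReadFileToString("/proc/self/cmdline", &current_cmd_line)) {
        // /proc/self/cmdline separates arguments with NUL bytes; make it printable.
        for (char& c : current_cmd_line) {
          if (c == '\0') c = ' ';
        }
        std::cout << "cmdline: " << current_cmd_line << '\n';
      }
      std::string maps;
      if (ReadFileToString("/proc/self/maps", &maps)) {
        std::cout << "/proc/self/maps:\n" << maps;
      }
    }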
/art/runtime/interpreter/mterp/arm/
op_return_void_no_barrier.S:4  blne MterpSuspendCheck @ (self)
/art/runtime/interpreter/mterp/x86_64/
alt_stub.S:16  call SYMBOL(MterpCheckBefore) # (self, shadow_frame)
op_monitor_enter.S:8  call SYMBOL(artLockObjectFromCode) # (object, self)
op_monitor_exit.S:12  call SYMBOL(artUnlockObjectFromCode) # (object, self)
/art/runtime/jdwp/
object_registry.cc:50  Thread* const self = Thread::Current(); local
51 StackHandleScope<1> hs(self);
79 Thread* const self = Thread::Current(); local
80 self->AssertNoPendingException();
83 Locks::thread_list_lock_->AssertNotHeld(self);
84 Locks::thread_suspend_count_lock_->AssertNotHeld(self);
89 ScopedObjectAccessUnchecked soa(self);
121 bool ObjectRegistry::ContainsLocked(Thread* self, mirror::Object* o, int32_t identity_hash_code, argument
127 if (o == self->DecodeJObject(entry->jni_reference)) {
138 Thread* const self = Thread::Current(); local
171 Thread* self = Thread::Current(); local
187 Thread* self = Thread::Current(); local
196 Thread* self = Thread::Current(); local
204 Thread* self = Thread::Current(); local
213 Thread* self = Thread::Current(); local
224 Thread* self = Thread::Current(); local
234 Thread* self = Thread::Current(); local
248 Thread* self = Thread::Current(); local
[all...]
/art/runtime/gc/
task_processor.h:57  void AddTask(Thread* self, HeapTask* task) REQUIRES(!*lock_);
58 HeapTask* GetTask(Thread* self) REQUIRES(!*lock_);
59 void Start(Thread* self) REQUIRES(!*lock_);
62 void Stop(Thread* self) REQUIRES(!*lock_);
63 void RunAllTasks(Thread* self) REQUIRES(!*lock_);
65 void UpdateTargetRunTime(Thread* self, HeapTask* target_time, uint64_t new_target_time)
heap-inl.h:41  inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, argument
50 CHECK_EQ(self->GetState(), kRunnable);
51 self->AssertThreadSuspensionIsAllowable();
57 obj = AllocLargeObject<kInstrumented, PreFenceVisitor>(self, &klass, byte_count,
63 self->ClearException();
78 byte_count <= self->TlabSize()) {
79 obj = self->AllocTlab(byte_count);
93 (obj = rosalloc_space_->AllocThreadLocal(self, byte_count, &bytes_allocated)) &&
109 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
114 obj = AllocateInternalWithGc(self,
212 PushOnAllocationStack(Thread* self, mirror::Object** obj) argument
223 AllocLargeObject(Thread* self, mirror::Class** klass, size_t byte_count, const PreFenceVisitor& pre_fence_visitor) argument
236 TryToAllocate(Thread* self, AllocatorType allocator_type, size_t alloc_size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
426 CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated, mirror::Object** obj) argument
[all...]
/art/runtime/base/
mutex.cc:195  void BaseMutex::CheckSafeToWait(Thread* self) { argument
196 if (self == nullptr) {
201 CHECK(self->GetHeldMutex(level_) == this || level_ == kMonitorLock)
206 BaseMutex* held_mutex = self->GetHeldMutex(static_cast<LockLevel>(i));
359 void Mutex::ExclusiveLock(Thread* self) { argument
360 DCHECK(self == nullptr || self == Thread::Current());
362 AssertNotHeld(self);
364 if (!recursive_ || !IsExclusiveHeld(self)) {
374 ScopedContentionRecorder scr(this, SafeGetTid(self), GetExclusiveOwnerTid());
402 ExclusiveTryLock(Thread* self) argument
443 ExclusiveUnlock(Thread* self) argument
549 ExclusiveLock(Thread* self) argument
583 ExclusiveUnlock(Thread* self) argument
618 ExclusiveLockWithTimeout(Thread* self, int64_t ms, int32_t ns) argument
673 HandleSharedLockContention(Thread* self, int32_t cur_state) argument
686 SharedTryLock(Thread* self) argument
787 Broadcast(Thread* self) argument
815 Signal(Thread* self) argument
832 Wait(Thread* self) argument
837 WaitHoldingLocks(Thread* self) argument
884 TimedWait(Thread* self, int64_t ms, int32_t ns) argument
[all...]
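Note: the mutex.cc hits sketch ART's recursion handling — ExclusiveLock asserts it runs on the current thread and only takes the underlying lock when !recursive_ || !IsExclusiveHeld(self). The standalone model below reproduces just that owner/recursion bookkeeping on top of std::mutex; contention recording, lock levels and the CheckSafeToWait checks are left out.

    #include <atomic>
    #include <cassert>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    // Toy Thread: only the identity matters for the ownership check.
    struct Thread {
      std::thread::id id = std::this_thread::get_id();
    };

    // Model of the recursion check in Mutex::ExclusiveLock: only acquire the
    // underlying lock when this thread does not already hold it.
    class RecursiveExclusiveMutex {
     public:
      explicit RecursiveExclusiveMutex(bool recursive) : recursive_(recursive) {}

      void ExclusiveLock(Thread* self) {
        // ART's DCHECK also tolerates self == nullptr; this toy does not.
        assert(self != nullptr && self->id == std::this_thread::get_id());
        if (!recursive_ || !IsExclusiveHeld(self)) {
          lock_.lock();
          owner_ = self->id;
        }
        ++recursion_count_;
      }

      void ExclusiveUnlock(Thread* self) {
        assert(IsExclusiveHeld(self));
        if (--recursion_count_ == 0) {
          owner_ = std::thread::id();  // no owner
          lock_.unlock();
        }
      }

      bool IsExclusiveHeld(const Thread* self) const { return owner_ == self->id; }

     private:
      const bool recursive_;
      std::mutex lock_;
      std::atomic<std::thread::id> owner_{std::thread::id()};
      int recursion_count_ = 0;
    };

    int main() {
      Thread self;
      RecursiveExclusiveMutex mu(/*recursive=*/true);
      mu.ExclusiveLock(&self);
      mu.ExclusiveLock(&self);  // re-entry is fine because recursive_ is set
      std::printf("held: %d\n", mu.IsExclusiveHeld(&self) ? 1 : 0);
      mu.ExclusiveUnlock(&self);
      mu.ExclusiveUnlock(&self);
    }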
/art/runtime/arch/arm/
quick_entrypoints_cc_arm.cc:24  Thread* self, JValue* result, uint32_t, uint32_t*,
29 Thread* self, JValue* result, const char* shorty) {
98 art_quick_invoke_stub_internal(method, args, args_size, self, result, result_in_float,
105 Thread* self, JValue* result, const char* shorty) {
106 quick_invoke_reg_setup<false>(method, args, args_size, self, result, shorty);
112 uint32_t args_size, Thread* self, JValue* result,
114 quick_invoke_reg_setup<true>(method, args, args_size, self, result, shorty);
28 quick_invoke_reg_setup(ArtMethod* method, uint32_t* args, uint32_t args_size, Thread* self, JValue* result, const char* shorty) argument
104 art_quick_invoke_stub(ArtMethod* method, uint32_t* args, uint32_t args_size, Thread* self, JValue* result, const char* shorty) argument
111 art_quick_invoke_static_stub(ArtMethod* method, uint32_t* args, uint32_t args_size, Thread* self, JValue* result, const char* shorty) argument
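Note: quick_entrypoints_cc_arm.cc funnels both invoke stubs through one quick_invoke_reg_setup<bool> template, <false> for instance calls and <true> for static ones. The standalone sketch below keeps only that dispatch split; the types, the body and the use of args_size as an element count are simplifications (the real routine sets up registers and calls art_quick_invoke_stub_internal, as the hit at line 98 shows).

    #include <cstdint>
    #include <cstdio>

    // Toy stand-ins; the real ArtMethod/JValue/Thread are far richer.
    struct Thread {};
    struct JValue { long l = 0; };
    struct ArtMethod { const char* name; };

    // One templated setup routine, instantiated for instance (<false>) and
    // static (<true>) calls, fronted by two thin wrappers -- the same split as
    // art_quick_invoke_stub / art_quick_invoke_static_stub above.
    template <bool kIsStatic>
    static void quick_invoke_reg_setup(ArtMethod* method, uint32_t* args,
                                       uint32_t args_size, Thread* /*self*/,
                                       JValue* result, const char* shorty) {
      // Instance calls carry the receiver in args[0]; static calls do not.
      uint32_t first_explicit = kIsStatic ? 0u : 1u;
      std::printf("invoke %s shorty=%s first explicit arg index=%u\n",
                  method->name, shorty, first_explicit);
      result->l = (first_explicit < args_size) ? args[first_explicit] : 0;
    }

    void art_quick_invoke_stub(ArtMethod* method, uint32_t* args, uint32_t args_size,
                               Thread* self, JValue* result, const char* shorty) {
      quick_invoke_reg_setup<false>(method, args, args_size, self, result, shorty);
    }

    void art_quick_invoke_static_stub(ArtMethod* method, uint32_t* args,
                                      uint32_t args_size, Thread* self,
                                      JValue* result, const char* shorty) {
      quick_invoke_reg_setup<true>(method, args, args_size, self, result, shorty);
    }

    int main() {
      Thread self;
      ArtMethod m{"Foo.bar"};
      uint32_t args[] = {0xCAFE, 7, 8};  // args[0] plays the receiver for the instance call
      JValue result;
      art_quick_invoke_stub(&m, args, /*args_size=*/3, &self, &result, "II");
      std::printf("result=%ld\n", result.l);
    }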
/art/tools/
cpplint.py:441  # self._section will move monotonically through this set. If it ever
464 def __init__(self):
465 dict.__init__(self)
467 self._section = self._INITIAL_SECTION
469 self._last_header = ''
471 def CanonicalizeAlphabeticalOrder(self, header_path):
486 def IsInAlphabeticalOrder(self, header_path):
495 canonical_header = self.CanonicalizeAlphabeticalOrder(header_path)
496 if self
[all...]
/art/runtime/gc/space/
memory_tool_malloc_space.h:34  mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
37 mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
39 mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
45 size_t Free(Thread* self, mirror::Object* ptr) OVERRIDE
48 size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) OVERRIDE
/art/runtime/lambda/
box_table.cc:79  Thread* self = Thread::Current(); local
83 /*Reader*/MutexLock mu(self, *Locks::lambda_table_lock_);
113 mirror::ByteArray::Alloc(self, closure->GetSize());
119 CHECK(self->IsExceptionPending());
130 MutexLock mu(self, *Locks::lambda_table_lock_);
161 Thread* self = Thread::Current(); local
186 LeakingAllocator::MakeFlexibleInstance<Closure>(self, boxed_closure_as_array->GetLength());
210 Thread* self = Thread::Current(); local
212 (kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
213 new_weaks_condition_.WaitHoldingLocks(self); // wai
220 Thread* self = Thread::Current(); local
263 Thread* self = Thread::Current(); local
271 Thread* self = Thread::Current(); local
280 Thread* self = Thread::Current(); local
[all...]
