Searched defs:ref (Results 1 - 25 of 39) sorted by relevance

/art/compiler/dex/
quick_compiler_callbacks.cc
28 MethodReference ref = verifier->GetMethodReference(); local
29 method_inliner_map_->GetMethodInliner(ref.dex_file)
35 void QuickCompilerCallbacks::ClassRejected(ClassReference ref) { argument
36 verification_results_->AddRejectedClass(ref);
reg_location.h
50 unsigned ref:1; // Something GC cares about. member in struct:art::RegLocation
verification_results.cc
50 MethodReference ref = method_verifier->GetMethodReference(); local
51 bool compile = IsCandidateForCompilation(ref, method_verifier->GetAccessFlags());
59 auto it = verified_methods_.find(ref);
63 << PrettyMethod(ref.dex_method_index, *ref.dex_file);
75 verified_methods_.Put(ref, verified_method);
76 DCHECK(verified_methods_.find(ref) != verified_methods_.end());
80 const VerifiedMethod* VerificationResults::GetVerifiedMethod(MethodReference ref) { argument
82 auto it = verified_methods_.find(ref);
86 void VerificationResults::RemoveVerifiedMethod(MethodReference ref) { argument
95 AddRejectedClass(ClassReference ref) argument
103 IsClassRejected(ClassReference ref) argument
[all...]
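
The verification_results.cc hits above cache per-method verification data in a map keyed by a MethodReference, i.e. a (dex file, method index) pair. A minimal stand-alone sketch of that lookup pattern, with invented names (MethodRef, VerifiedInfo, VerificationCache) rather than ART's actual types:

    #include <cstdint>
    #include <map>
    #include <memory>
    #include <tuple>

    struct MethodRef {
      const void* dex_file;        // identity of the owning dex file
      uint32_t dex_method_index;   // index of the method within that file
      bool operator<(const MethodRef& o) const {
        return std::tie(dex_file, dex_method_index) <
               std::tie(o.dex_file, o.dex_method_index);
      }
    };

    struct VerifiedInfo {
      bool compile_candidate = false;
    };

    class VerificationCache {
     public:
      // Record the verification result for a method (last writer wins).
      void Put(const MethodRef& ref, std::unique_ptr<VerifiedInfo> info) {
        results_[ref] = std::move(info);
      }
      // Return the cached result, or nullptr if the method was never verified.
      const VerifiedInfo* Get(const MethodRef& ref) const {
        auto it = results_.find(ref);
        return it == results_.end() ? nullptr : it->second.get();
      }

     private:
      std::map<MethodRef, std::unique_ptr<VerifiedInfo>> results_;
    };
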
mir_method_info.h
118 void SetDevirtualizationTarget(const MethodReference& ref) { argument
121 DCHECK_LE(ref.dex_method_index, 0xffffu);
122 target_dex_file_ = ref.dex_file;
123 target_method_idx_ = ref.dex_method_index;
/art/runtime/
jni_env_ext-inl.h
28 IndirectRef ref = locals.Add(local_ref_cookie, obj); local
42 return reinterpret_cast<T>(ref);
gc_root-inl.h
35 inline GcRoot<MirrorType>::GcRoot(MirrorType* ref) argument
36 : root_(mirror::CompressedReference<mirror::Object>::FromMirrorPtr(ref)) { }
handle.h
127 StackReference<mirror::Object>* ref = Handle<T>::GetReference(); local
128 T* old = down_cast<T*>(ref->AsMirrorPtr());
129 ref->Assign(reference);
read_barrier-inl.h
38 // load to the ref field (ptr) load to avoid needing a load-load
44 MirrorType* ref = ref_addr->AsMirrorPtr(); local
47 ref = reinterpret_cast<MirrorType*>(Mark(ref));
52 AssertToSpaceInvariant(obj, offset, ref);
53 return ref;
58 MirrorType* ref = ref_addr->AsMirrorPtr(); local
59 MirrorType* old_ref = ref;
63 ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
66 offset, old_ref, ref);
78 MirrorType* ref = *root; local
118 MirrorType* ref = root->AsMirrorPtr(); local
175 AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset, mirror::Object* ref) argument
[all...]
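
The read_barrier-inl.h hits above follow a common read-barrier shape: load the reference field, pass it through a mark/forwarding hook while the collector is active, then assert the to-space invariant. A minimal self-contained sketch of that flow (not ART's ReadBarrier API; every name below is invented for illustration):

    #include <atomic>
    #include <cassert>

    struct Object {};  // opaque heap object for this sketch

    // Trivial stand-ins for collector hooks; a real runtime consults GC state here.
    inline Object* Mark(Object* ref) { return ref; }      // would return the to-space copy
    inline bool IsReadBarrierActive() { return false; }   // true only while the GC runs
    inline bool IsInToSpace(Object* /*ref*/) { return true; }

    // Read a reference field, applying the barrier when a concurrent GC is active.
    inline Object* BarrierRead(const std::atomic<Object*>* field) {
      Object* ref = field->load(std::memory_order_acquire);
      if (IsReadBarrierActive() && ref != nullptr) {
        ref = Mark(ref);                                   // may forward to the new copy
      }
      assert(ref == nullptr || IsInToSpace(ref));          // to-space invariant
      return ref;
    }
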
reference_table.cc
128 // change during the sorting process. The classes are forwarded by ref->GetClass().
166 mirror::Object* ref = entries[idx].Read(); local
167 if (ref == nullptr) {
170 if (runtime->IsClearedJniWeakGlobal(ref)) {
174 if (ref->GetClass() == nullptr) {
176 size_t size = ref->SizeOf();
177 os << StringPrintf(" %5d: %p (raw) (%zd bytes)\n", idx, ref, size);
181 std::string className(PrettyTypeOf(ref));
184 size_t element_count = GetElementCount(ref);
187 } else if (ref
[all...]
reflection.cc
883 IndirectRef ref = reinterpret_cast<IndirectRef>(obj); local
884 IndirectRefKind kind = GetIndirectRefKind(ref);
890 self->GetJniEnv()->vm->UpdateGlobal(self, ref, result);
893 self->GetJniEnv()->vm->UpdateWeakGlobal(self, ref, result);
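
The reflection.cc hit above reads the kind of an indirect JNI reference and then updates the matching global or weak-global table. A rough sketch of that dispatch, using invented table and enum types rather than ART's JavaVMExt API:

    enum class IndirectRefKind { kLocal, kGlobal, kWeakGlobal };

    struct RefTable {
      // In a real runtime this rewrites the table slot; here it is a no-op stub.
      void Update(void* /*ref*/, void* /*new_value*/) {}
    };

    // Route an update to whichever table owns the indirect reference.
    inline void UpdateIndirectRef(IndirectRefKind kind, RefTable& globals,
                                  RefTable& weak_globals, void* ref, void* value) {
      switch (kind) {
        case IndirectRefKind::kGlobal:
          globals.Update(ref, value);
          break;
        case IndirectRefKind::kWeakGlobal:
          weak_globals.Update(ref, value);
          break;
        case IndirectRefKind::kLocal:
          // Local references belong to the current thread; nothing to update here.
          break;
      }
    }
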
/art/runtime/native/
java_lang_ref_FinalizerReference.cc
30 mirror::FinalizerReference* const ref = soa.Decode<mirror::FinalizerReference*>(javaThis); member in class:art::mirror
31 return Runtime::Current()->GetHeap()->GetReferenceProcessor()->MakeCircularListIfUnenqueued(ref);
39 REGISTER_NATIVE_METHODS("java/lang/ref/FinalizerReference");
java_lang_ref_Reference.cc
30 mirror::Reference* const ref = soa.Decode<mirror::Reference*>(javaThis); member in class:art::mirror
32 Runtime::Current()->GetHeap()->GetReferenceProcessor()->GetReferent(soa.Self(), ref);
41 REGISTER_NATIVE_METHODS("java/lang/ref/Reference");
/art/runtime/base/
debug_stack.h
66 explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<false>* ref) { UNUSED(ref); } argument
112 explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<kIsDebug>* ref) argument
113 : ref_(ref) {
/art/runtime/gc/
reference_processor.cc
188 // Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
190 void ReferenceProcessor::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* ref, argument
193 // klass can be the class of the old object if the visitor already updated the class of ref.
196 mirror::HeapReference<mirror::Object>* referent = ref->GetReferentReferenceAddr();
203 soft_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
205 weak_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
207 finalizer_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
209 phantom_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
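
The reference_processor.cc hit above parks a java.lang.ref.Reference whose referent is not yet marked on one of four queues, chosen by the reference's kind. A condensed sketch of that routing, assuming an explicit ReferenceKind tag in place of ART's class checks:

    #include <deque>

    struct Reference;  // a java.lang.ref.Reference-like object

    enum class ReferenceKind { kSoft, kWeak, kFinalizer, kPhantom };

    struct ReferenceQueues {
      std::deque<Reference*> soft, weak, finalizer, phantom;

      // Defer processing of ref until the marking phase has finished.
      void DelayReferenceReferent(ReferenceKind kind, Reference* ref) {
        switch (kind) {
          case ReferenceKind::kSoft:      soft.push_back(ref); break;
          case ReferenceKind::kWeak:      weak.push_back(ref); break;
          case ReferenceKind::kFinalizer: finalizer.push_back(ref); break;
          case ReferenceKind::kPhantom:   phantom.push_back(ref); break;
        }
      }
    };
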
reference_queue.cc
32 void ReferenceQueue::AtomicEnqueueIfNotEnqueued(Thread* self, mirror::Reference* ref) { argument
33 DCHECK(ref != nullptr);
35 if (!ref->IsEnqueued()) {
36 EnqueuePendingReference(ref);
40 void ReferenceQueue::EnqueueReference(mirror::Reference* ref) { argument
41 CHECK(ref->IsEnqueuable());
42 EnqueuePendingReference(ref);
45 void ReferenceQueue::EnqueuePendingReference(mirror::Reference* ref) { argument
46 DCHECK(ref != nullptr);
48 // 1 element cyclic queue, ie: Reference ref
69 mirror::Reference* ref; local
143 mirror::Reference* ref = DequeuePendingReference(); local
164 mirror::FinalizerReference* ref = DequeuePendingReference()->AsFinalizerReference(); local
189 mirror::Reference* ref = head; local
[all...]
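
The reference_queue.cc hits above chain references into a pending list, with a single element forming a cycle ("1 element cyclic queue"). A self-contained sketch of such an intrusive circular pending queue, with invented field and type names (and FIFO order chosen for simplicity, not to mirror ART exactly):

    #include <cassert>

    struct PendingRef {
      PendingRef* pending_next = nullptr;   // non-null means "already enqueued"
    };

    class PendingQueue {
     public:
      bool IsEmpty() const { return tail_ == nullptr; }

      // Enqueue ref only if it is not already on some pending list.
      void EnqueueIfNotEnqueued(PendingRef* ref) {
        assert(ref != nullptr);
        if (ref->pending_next == nullptr) {
          Enqueue(ref);
        }
      }

      void Enqueue(PendingRef* ref) {
        if (tail_ == nullptr) {
          ref->pending_next = ref;                  // single element points at itself
        } else {
          ref->pending_next = tail_->pending_next;  // link in front of the head
          tail_->pending_next = ref;
        }
        tail_ = ref;
      }

      // Remove and return the oldest pending reference, or nullptr if empty.
      PendingRef* Dequeue() {
        if (tail_ == nullptr) return nullptr;
        PendingRef* head = tail_->pending_next;
        if (head == tail_) {
          tail_ = nullptr;                          // queue is now empty
        } else {
          tail_->pending_next = head->pending_next;
        }
        head->pending_next = nullptr;               // mark as no longer enqueued
        return head;
      }

     private:
      PendingRef* tail_ = nullptr;  // last element; tail_->pending_next is the head
    };
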
/art/compiler/dex/quick/
mir_to_lir-inl.h
274 inline void Mir2Lir::CheckRegStorage(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp) argument
277 CheckRegStorageImpl(rs, wide, ref, fp, kFailOnSizeError, kReportSizeError);
mir_to_lir.cc
196 OpSize op_size = rl_dest.wide ? k64 : (rl_dest.ref ? kReference : k32);
545 DCHECK(rl_src[0].ref);
1364 void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, argument
1368 if (ref == RefCheck::kCheckRef) {
1371 CHECK(false) << "Reg storage not 64b for ref.";
1373 LOG(WARNING) << "Reg storage not 64b for ref.";
1411 rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
/art/runtime/gc/collector/
concurrent_copying.h
105 mirror::Object* ref = slot->LoadSequentiallyConsistent(); local
106 while (ref == nullptr) {
108 ref = slot->LoadSequentiallyConsistent();
112 return ref;
170 void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset, mirror::Object* ref)
172 bool IsInToSpace(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
173 DCHECK(ref != nullptr);
174 return IsMarked(ref) == ref;
232 bool IsOnAllocStack(mirror::Object* ref) SHARED_LOCKS_REQUIRE
[all...]
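
The concurrent_copying.h hit at lines 105-112 spins on a slot until another thread publishes a non-null object into it. A minimal illustration of that wait loop using std::atomic (a sketch, not ART's Atomic wrapper):

    #include <atomic>
    #include <thread>

    struct Object {};

    // Block (by spinning and yielding) until some other thread stores a
    // non-null pointer into *slot, then return that pointer.
    inline Object* WaitForSlot(const std::atomic<Object*>* slot) {
      Object* ref = slot->load(std::memory_order_seq_cst);
      while (ref == nullptr) {
        std::this_thread::yield();  // give the producing thread a chance to run
        ref = slot->load(std::memory_order_seq_cst);
      }
      return ref;
    }
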
mark_compact.cc
305 void MarkCompact::DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref, argument
307 reinterpret_cast<MarkCompact*>(arg)->DelayReferenceReferent(klass, ref);
469 void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
472 ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
592 // Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
610 void operator()(mirror::Class* klass, mirror::Reference* ref) const
613 collector_->DelayReferenceReferent(klass, ref);
semi_space.cc
294 mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset); variable
295 if (from_space_->HasAddress(ref)) {
297 LOG(FATAL) << ref << " found in from space";
591 auto ref = StackReference<mirror::Object>::FromMirrorPtr(root); local
592 reinterpret_cast<SemiSpace*>(arg)->MarkObject(&ref);
593 return ref.AsMirrorPtr();
601 void SemiSpace::DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref, argument
603 reinterpret_cast<SemiSpace*>(arg)->DelayReferenceReferent(klass, ref);
610 auto ref = StackReference<mirror::Object>::FromMirrorPtr(*root); local
611 MarkObject(&ref);
[all...]
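
The semi_space.cc hits at lines 591-593 wrap a raw root in a stack reference, mark it, and return the possibly forwarded pointer for the caller to store back. A compact sketch of that pattern for a moving collector, with invented names:

    struct Object {};

    // Stub for the collector hook: a real semi-space collector would copy the
    // object to to-space on first visit and return its forwarding address.
    inline Object* MarkAndForward(Object* obj) { return obj; }

    // Root callback: the caller stores whatever pointer is returned, which is
    // how a moving collector patches roots to the relocated copies.
    inline Object* MarkRootCallback(Object* root) {
      Object* ref = root;          // local slot, playing the StackReference role
      ref = MarkAndForward(ref);   // may yield a different (moved) address
      return ref;
    }
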
/art/runtime/gc/space/
region_space.cc
329 void RegionSpace::RecordAlloc(mirror::Object* ref) { argument
330 CHECK(ref != nullptr);
331 Region* r = RefToRegion(ref);
region_space.h
171 bool IsInFromSpace(mirror::Object* ref) { argument
172 if (HasAddress(ref)) {
173 Region* r = RefToRegionUnlocked(ref);
179 bool IsInUnevacFromSpace(mirror::Object* ref) { argument
180 if (HasAddress(ref)) {
181 Region* r = RefToRegionUnlocked(ref);
187 bool IsInToSpace(mirror::Object* ref) { argument
188 if (HasAddress(ref)) {
189 Region* r = RefToRegionUnlocked(ref);
195 RegionType GetRegionType(mirror::Object* ref) { argument
211 AddLiveBytes(mirror::Object* ref, size_t alloc_size) argument
[all...]
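
The region_space.h hits above all share one shape: check that the address lies inside the space, map it to its region, then test the region's type. A small sketch of that classification over a fixed-size region table (region size, count, and names are invented for the example):

    #include <cstddef>
    #include <cstdint>

    enum class RegionType { kToSpace, kFromSpace, kUnevacFromSpace, kNone };

    class RegionSpaceSketch {
     public:
      static constexpr size_t kRegionSize = 1u << 20;   // 1 MiB regions (example)
      static constexpr size_t kNumRegions = 256;

      explicit RegionSpaceSketch(uintptr_t begin) : begin_(begin) {
        for (RegionType& t : regions_) t = RegionType::kNone;
      }

      bool HasAddress(const void* ref) const {
        uintptr_t p = reinterpret_cast<uintptr_t>(ref);
        return p >= begin_ && p < begin_ + kRegionSize * kNumRegions;
      }

      // Map an object address to the type of the region containing it.
      RegionType GetRegionType(const void* ref) const {
        if (!HasAddress(ref)) return RegionType::kNone;
        size_t idx = (reinterpret_cast<uintptr_t>(ref) - begin_) / kRegionSize;
        return regions_[idx];
      }

      bool IsInFromSpace(const void* ref) const {
        return GetRegionType(ref) == RegionType::kFromSpace;
      }
      bool IsInToSpace(const void* ref) const {
        return GetRegionType(ref) == RegionType::kToSpace;
      }

     private:
      uintptr_t begin_;
      RegionType regions_[kNumRegions];
    };
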
/art/runtime/mirror/
object.cc
52 Object* ref = obj->GetFieldObject<Object>(offset); local
55 dest_obj_->SetFieldObjectWithoutWriteBarrier<false, false>(offset, ref);
58 void operator()(mirror::Class* klass, mirror::Reference* ref) const
60 // Copy java.lang.ref.Reference.referent which isn't visited in
63 this->operator()(ref, mirror::Reference::ReferentOffset(), false);
/art/runtime/quick/
inline_method_analyser.cc
134 bool InlineMethodAnalyser::IsSyntheticAccessor(MethodReference ref) { argument
135 const DexFile::MethodId& method_id = ref.dex_file->GetMethodId(ref.dex_method_index);
136 const char* method_name = ref.dex_file->GetMethodName(method_id);
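
The inline_method_analyser.cc hit above resolves a MethodReference to its name before deciding whether the method is a synthetic accessor. A hedged sketch of such a name-based test, assuming the usual javac access$NNN naming convention rather than ART's exact rule:

    #include <cstring>

    // Heuristic: javac emits synthetic accessors for private members with
    // names of the form "access$NNN"; treat any such name as an accessor.
    inline bool LooksLikeSyntheticAccessor(const char* method_name) {
      static const char kPrefix[] = "access$";
      return std::strncmp(method_name, kPrefix, sizeof(kPrefix) - 1) == 0;
    }
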
/art/runtime/gc/accounting/
mod_union_table.cc
111 mirror::Object* ref = obj_ptr->AsMirrorPtr(); local
112 if (ref != nullptr && !from_space_->HasAddress(ref) && !immune_space_->HasAddress(ref)) {
176 mirror::Object* ref = ref_ptr->AsMirrorPtr(); local
178 if (ref != nullptr && mod_union_table_->ShouldAddReference(ref)) {
220 mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset); local
221 if (ref != nullptr && mod_union_table_->ShouldAddReference(ref)
[all...]
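
The mod_union_table.cc hits above record a visited reference only when it is non-null and points outside the spaces the table is not tracking. A small sketch of that filter accumulating results into a set (the Space type here is an invented stand-in for an address range):

    #include <cstdint>
    #include <set>

    struct Object {};

    // Minimal stand-in for a space: just an address range.
    struct Space {
      uintptr_t begin;
      uintptr_t end;
      bool HasAddress(const Object* ref) const {
        uintptr_t p = reinterpret_cast<uintptr_t>(ref);
        return p >= begin && p < end;
      }
    };

    class ModUnionSketch {
     public:
      ModUnionSketch(const Space& from, const Space& immune)
          : from_(from), immune_(immune) {}

      // Record ref only if it escapes both the from-space and the immune space.
      void VisitReference(const Object* ref) {
        if (ref != nullptr && !from_.HasAddress(ref) && !immune_.HasAddress(ref)) {
          references_.insert(ref);
        }
      }

      const std::set<const Object*>& References() const { return references_; }

     private:
      Space from_;
      Space immune_;
      std::set<const Object*> references_;
    };
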

Completed in 402 milliseconds
