Searched refs:ALWAYS_INLINE (Results 1 - 25 of 96) sorted by relevance


/art/runtime/native/
scoped_fast_native_object_access.h
30 ALWAYS_INLINE explicit ScopedFastNativeObjectAccess(JNIEnv* env)
34 ALWAYS_INLINE ~ScopedFastNativeObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) {}
/art/runtime/gc/accounting/
bitmap.h
49 static ALWAYS_INLINE constexpr size_t BitIndexToWordIndex(uintptr_t offset) {
54 static ALWAYS_INLINE constexpr T WordIndexToBitIndex(T word_index) {
58 static ALWAYS_INLINE constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) {
62 ALWAYS_INLINE bool SetBit(size_t bit_index) {
66 ALWAYS_INLINE bool ClearBit(size_t bit_index) {
70 ALWAYS_INLINE bool TestBit(size_t bit_index) const;
73 ALWAYS_INLINE bool AtomicTestAndSetBit(size_t bit_index);
96 ALWAYS_INLINE void CheckValidBitIndex(size_t bit_index) const {
112 ALWAYS_INLINE bool ModifyBit(uintptr_t bit_index);
137 ALWAYS_INLINE uintptr_
[all...]
mod_union_table-inl.h
36 bool ShouldAddReference(const mirror::Object* ref) const OVERRIDE ALWAYS_INLINE {
card_table.h
59 ALWAYS_INLINE void MarkCard(const void *addr) {
127 void* AddrFromCard(const uint8_t *card_addr) const ALWAYS_INLINE; member in class:art::gc::accounting::CardTable
130 uint8_t* CardFromAddr(const void *addr) const ALWAYS_INLINE; member in class:art::gc::accounting::CardTable
138 bool IsValidCard(const uint8_t* card_addr) const ALWAYS_INLINE; member in class:art::gc::accounting::CardTable
140 void CheckCardValid(uint8_t* card) const ALWAYS_INLINE; member in class:art::gc::accounting::CardTable
/art/runtime/arch/
code_offset.h
32 ALWAYS_INLINE static CodeOffset FromOffset(uint32_t offset, InstructionSet isa = kRuntimeISA) {
36 ALWAYS_INLINE static CodeOffset FromCompressedOffset(uint32_t offset) {
40 ALWAYS_INLINE uint32_t Uint32Value(InstructionSet isa = kRuntimeISA) const {
47 ALWAYS_INLINE uint32_t CompressedValue() const {
51 ALWAYS_INLINE CodeOffset() = default;
52 ALWAYS_INLINE CodeOffset(const CodeOffset&) = default;
53 ALWAYS_INLINE CodeOffset& operator=(const CodeOffset&) = default;
54 ALWAYS_INLINE CodeOffset& operator=(CodeOffset&&) = default;
57 ALWAYS_INLINE explicit CodeOffset(uint32_t value) : value_(value) {}
/art/runtime/
obj_ptr.h
45 ALWAYS_INLINE ObjPtr() REQUIRES_SHARED(Locks::mutator_lock_) : reference_(0u) {}
52 ALWAYS_INLINE ObjPtr(std::nullptr_t) // NOLINT
58 ALWAYS_INLINE ObjPtr(Type* ptr) // NOLINT
65 ALWAYS_INLINE ObjPtr(const ObjPtr<Type>& other) // NOLINT
72 ALWAYS_INLINE ObjPtr& operator=(const ObjPtr<Type>& other)
78 ALWAYS_INLINE ObjPtr& operator=(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
83 ALWAYS_INLINE void Assign(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
87 ALWAYS_INLINE MirrorType* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
91 ALWAYS_INLINE bool IsNull() const {
96 ALWAYS_INLINE MirrorTyp
[all...]
handle.h
45 ALWAYS_INLINE Handle(const Handle<T>& handle) = default;
47 ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) = default;
49 ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) {
52 ALWAYS_INLINE T& operator*() const REQUIRES_SHARED(Locks::mutator_lock_) {
56 ALWAYS_INLINE T* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
60 ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) {
64 ALWAYS_INLINE bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_) {
68 ALWAYS_INLINE jobject ToJObject() const REQUIRES_SHARED(Locks::mutator_lock_) {
76 ALWAYS_INLINE StackReference<mirror::Object>* GetReference() {
80 ALWAYS_INLINE cons
[all...]
jni_internal.h
46 ALWAYS_INLINE
51 ALWAYS_INLINE
56 ALWAYS_INLINE
61 ALWAYS_INLINE
scoped_thread_state_change.h
44 ALWAYS_INLINE ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
47 ALWAYS_INLINE ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_);
49 ALWAYS_INLINE Thread* Self() const {
101 ALWAYS_INLINE bool IsRunnable() const;
104 ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
107 ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
118 ALWAYS_INLINE ~ScopedObjectAccessAlreadyRunnable() {}
147 ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(JNIEnv* env)
150 ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(Thread* self)
153 ALWAYS_INLINE ~ScopedObjectAccessUnchecke
180 ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE; member in class:art::ScopedObjectAccess
[all...]
bit_memory_region.h
29 ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_size) {
43 ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset, size_t bit_size) const {
49 ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
53 ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {
57 ALWAYS_INLINE uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {
62 ALWAYS_INLINE void StoreBits(uintptr_t bit_offset, uint32_t value, size_t length) {
handle_scope.h
49 ALWAYS_INLINE uint32_t NumberOfReferences() const;
51 ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
54 ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);
61 ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
62 ALWAYS_INLINE HandleScope* AsHandleScope();
63 ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
64 ALWAYS_INLINE const HandleScope* AsHandleScope() const;
105 ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
108 ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);
110 ALWAYS_INLINE MutableHandl
[all...]
read_barrier.h
49 ALWAYS_INLINE static MirrorType* Barrier(
56 ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
63 ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
69 ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
87 // ALWAYS_INLINE on this caused a performance regression b/26744236.
101 ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
106 ALWAYS_INLINE static bool IsGray(mirror::Object* obj)
method_handles.h
61 ALWAYS_INLINE bool ConvertArgumentValue(Handle<mirror::MethodType> callsite_type,
72 ALWAYS_INLINE bool ConvertReturnValue(Handle<mirror::MethodType> callsite_type,
148 ALWAYS_INLINE uint32_t Get() REQUIRES_SHARED(Locks::mutator_lock_) {
155 ALWAYS_INLINE int64_t GetLong() REQUIRES_SHARED(Locks::mutator_lock_) {
162 ALWAYS_INLINE ObjPtr<mirror::Object> GetReference() REQUIRES_SHARED(Locks::mutator_lock_) {
186 ALWAYS_INLINE void Set(uint32_t value) REQUIRES_SHARED(Locks::mutator_lock_) {
190 ALWAYS_INLINE void SetReference(ObjPtr<mirror::Object> value)
195 ALWAYS_INLINE void SetLong(int64_t value) REQUIRES_SHARED(Locks::mutator_lock_) {
verify_object.h
60 ALWAYS_INLINE
68 ALWAYS_INLINE bool VerifyClassClass(ObjPtr<mirror::Class> c) NO_THREAD_SAFETY_ANALYSIS;
memory_region.h
61 ALWAYS_INLINE T Load(uintptr_t offset) const {
71 ALWAYS_INLINE void Store(uintptr_t offset, T value) const {
80 ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {
95 ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const {
107 ALWAYS_INLINE T* PointerTo(uintptr_t offset) const {
113 ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
119 ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {
133 ALWAYS_INLINE uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {
182 ALWAYS_INLINE MemoryRegion Subregion(uintptr_t offset, uintptr_t size_in) const {
189 ALWAYS_INLINE voi
[all...]
utf.h
55 ALWAYS_INLINE int CompareModifiedUtf8ToModifiedUtf8AsUtf16CodePointValues(const char* utf8_1,
116 ALWAYS_INLINE uint16_t GetLeadingUtf16Char(uint32_t maybe_pair);
122 ALWAYS_INLINE uint16_t GetTrailingUtf16Char(uint32_t maybe_pair);
art_method.h
74 ALWAYS_INLINE mirror::Class* GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);
77 ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
183 ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);
340 ALWAYS_INLINE uint32_t GetDexMethodIndexUnchecked() {
343 ALWAYS_INLINE uint32_t GetDexMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);
350 ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(PointerSize pointer_size)
352 ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
356 ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
360 ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods,
394 ALWAYS_INLINE cons
[all...]
/art/runtime/base/
macros.h
42 NO_RETURN ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
43 ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; } \
44 ALWAYS_INLINE void operator delete(void*, void*) noexcept { } \
63 #define ALWAYS_INLINE macro
65 #define ALWAYS_INLINE __attribute__ ((always_inline)) macro
69 // #define ALWAYS_INLINE_LAMBDA ALWAYS_INLINE
arena_object.h
45 ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; }
46 ALWAYS_INLINE void operator delete(void*, void*) noexcept { }
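The macros.h hits above show ALWAYS_INLINE defined twice: once as an empty token and once as __attribute__ ((always_inline)). A minimal sketch of that pattern follows; the #ifndef NDEBUG guard and the ExampleBitmap accessor are assumptions added for illustration, not taken from the results.

#include <cstddef>
#include <cstdint>

// Guard assumed for this sketch: debug builds drop the forced-inline hint so
// small functions stay visible to a debugger; optimized builds force inlining.
#ifndef NDEBUG
#define ALWAYS_INLINE
#else
#define ALWAYS_INLINE __attribute__ ((always_inline))
#endif

// Hypothetical accessor in the style of the small hot methods listed above
// (Bitmap::TestBit, Handle<T>::Get, and similar).
class ExampleBitmap {
 public:
  ALWAYS_INLINE bool TestBit(size_t bit_index) const {
    return (word_ >> bit_index) & 1u;
  }

 private:
  uintptr_t word_ = 0u;
};

Since in-class member definitions are implicitly inline, the attribute only strengthens the compiler's inlining decision; applying it to an out-of-line, non-inline definition typically draws a "function might not be inlinable" warning.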
/art/runtime/verifier/
verifier_compiler_binding.h
28 ALWAYS_INLINE
/art/compiler/debug/dwarf/
debug_frame_opcode_writer.h
47 void ALWAYS_INLINE AdvancePC(int absolute_pc) {
73 void ALWAYS_INLINE RelOffset(Reg reg, int offset) {
78 void ALWAYS_INLINE AdjustCFAOffset(int delta) {
83 void ALWAYS_INLINE RelOffsetForMany(Reg reg_base, int offset,
99 void ALWAYS_INLINE RestoreMany(Reg reg_base, uint32_t reg_mask) {
111 void ALWAYS_INLINE Nop() {
117 void ALWAYS_INLINE Offset(Reg reg, int offset) {
139 void ALWAYS_INLINE Restore(Reg reg) {
151 void ALWAYS_INLINE Undefined(Reg reg) {
159 void ALWAYS_INLINE SameValu
[all...]
/art/runtime/mirror/
object_array.h
47 ALWAYS_INLINE T* Get(int32_t i) REQUIRES_SHARED(Locks::mutator_lock_);
55 ALWAYS_INLINE void Set(int32_t i, ObjPtr<T> object) REQUIRES_SHARED(Locks::mutator_lock_);
59 ALWAYS_INLINE void Set(int32_t i, ObjPtr<T> object) NO_THREAD_SAFETY_ANALYSIS;
67 ALWAYS_INLINE void SetWithoutChecks(int32_t i, ObjPtr<T> object) NO_THREAD_SAFETY_ANALYSIS;
72 ALWAYS_INLINE void SetWithoutChecksAndWriteBarrier(int32_t i, ObjPtr<T> object)
77 ALWAYS_INLINE T* GetWithoutChecks(int32_t i) REQUIRES_SHARED(Locks::mutator_lock_);
object.h
93 ALWAYS_INLINE Class* GetClass() REQUIRES_SHARED(Locks::mutator_lock_);
100 ALWAYS_INLINE uint32_t GetReadBarrierState(uintptr_t* fake_address_dependency)
103 ALWAYS_INLINE uint32_t GetReadBarrierState() REQUIRES_SHARED(Locks::mutator_lock_);
105 ALWAYS_INLINE uint32_t GetReadBarrierStateAcquire() REQUIRES_SHARED(Locks::mutator_lock_);
110 ALWAYS_INLINE void SetReadBarrierState(uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_);
113 ALWAYS_INLINE bool AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state)
116 ALWAYS_INLINE uint32_t GetMarkBit() REQUIRES_SHARED(Locks::mutator_lock_);
118 ALWAYS_INLINE bool AtomicSetMarkBit(uint32_t expected_mark_bit, uint32_t mark_bit)
122 ALWAYS_INLINE void AssertReadBarrierState() const REQUIRES_SHARED(Locks::mutator_lock_);
129 ALWAYS_INLINE boo
[all...]
array.h
42 ALWAYS_INLINE static Array* Alloc(Thread* self,
60 ALWAYS_INLINE int32_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
93 ALWAYS_INLINE bool CheckIsValidIndex(int32_t index) REQUIRES_SHARED(Locks::mutator_lock_);
126 const T* GetData() const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
130 T* GetData() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
134 T Get(int32_t i) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
136 T GetWithoutChecks(int32_t i) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
141 void Set(int32_t i, T value) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
146 void Set(int32_t i, T value) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
153 void SetWithoutChecks(int32_t i, T value) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSI
[all...]
class.h
234 ALWAYS_INLINE uint32_t GetAccessFlags() REQUIRES_SHARED(Locks::mutator_lock_) {
246 ALWAYS_INLINE uint32_t GetClassFlags() REQUIRES_SHARED(Locks::mutator_lock_) {
254 ALWAYS_INLINE bool IsInterface() REQUIRES_SHARED(Locks::mutator_lock_) {
259 ALWAYS_INLINE bool IsPublic() REQUIRES_SHARED(Locks::mutator_lock_) {
264 ALWAYS_INLINE bool IsFinal() REQUIRES_SHARED(Locks::mutator_lock_) {
268 ALWAYS_INLINE bool IsFinalizable() REQUIRES_SHARED(Locks::mutator_lock_) {
272 ALWAYS_INLINE void SetRecursivelyInitialized() REQUIRES_SHARED(Locks::mutator_lock_) {
278 ALWAYS_INLINE void SetHasDefaultMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
284 ALWAYS_INLINE void SetFinalizable() REQUIRES_SHARED(Locks::mutator_lock_) {
289 ALWAYS_INLINE boo
908 REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE; member in class:art::mirror::FINAL
[all...]
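Taken together, the hits show the attribute in several positions: before the return type (handle.h, obj_ptr.h), between the return type and the name (debug_frame_opcode_writer.h), and trailing the declaration after the cv-qualifiers (card_table.h, array.h). The sketch below illustrates the first and last placements with a hypothetical handle type; REQUIRES_SHARED is stubbed out so the snippet stands alone, whereas in ART it expands to a Clang thread-safety annotation.

#define ALWAYS_INLINE __attribute__ ((always_inline))
#define REQUIRES_SHARED(...)  // stubbed for this sketch; the real definition lives in ART's macros

// Hypothetical handle-like wrapper mirroring the declarations listed above.
template <typename T>
class ExampleHandle {
 public:
  explicit ExampleHandle(T* ptr) : ptr_(ptr) {}

  // Attribute before the return type, as in Handle<T>::Get() above.
  ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return ptr_;
  }

  // The trailing form appears only on declarations in the results; GCC and
  // Clang reject it on a definition, so the body is provided out of line.
  bool IsNull() const ALWAYS_INLINE;

 private:
  T* ptr_;
};

template <typename T>
inline bool ExampleHandle<T>::IsNull() const {
  return ptr_ == nullptr;
}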

