/*
 * kref.h - library routines for handling generic reference counted objects
 */
struct kref {
        atomic_t refcount;
};
/**
 * kref_init - initialize object.
 * @kref: object in question.
 */
static inline void kref_init(struct kref *kref)
{
        atomic_set(&kref->refcount, 1);
}
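/*
 * Usage sketch, not part of kref.h: a reference counted object embeds a
 * struct kref and initializes it once, at creation time.  The type
 * struct my_data, its fields, and my_data_alloc() are hypothetical;
 * assumes <linux/slab.h> and <linux/list.h>.
 */
struct my_data {
        struct kref refcount;
        struct list_head node;
        int key;
};

static struct my_data *my_data_alloc(void)
{
        struct my_data *d = kzalloc(sizeof(*d), GFP_KERNEL);

        if (d)
                kref_init(&d->refcount);  /* count starts at 1, owned by the caller */
        return d;
}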
/**
 * kref_get - increment refcount for object.
 * @kref: object.
 */
static inline void kref_get(struct kref *kref)
{
        /* If the refcount was 0 before the increment, we have a race
         * condition: this kref is being freed by some other thread right
         * now.  In that case use kref_get_unless_zero() instead.
         */
        WARN_ON_ONCE(atomic_inc_return(&kref->refcount) < 2);
}
/**
 * kref_sub - subtract a number of refcounts for object.
 * @kref: object.
 * @count: number of refcounts to subtract.
 * @release: pointer to the function that will clean up the object when the
 *           last reference to the object is released.  This pointer is
 *           required; it is not acceptable to pass kfree in as this
 *           function.  If the caller does pass kfree to this function, you
 *           will be publicly mocked mercilessly by the kref maintainer,
 *           and anyone else who happens to notice it.
 *
 * Subtract @count from the refcount, and if it drops to 0, call release().
 * Return 1 if the object was removed, otherwise return 0.  Beware: even if
 * this function returns 0, you still cannot count on the kref remaining in
 * memory.  Only use the return value to check whether the kref is now gone,
 * never to conclude that it is still present.
 */
static inline int kref_sub(struct kref *kref, unsigned int count,
                           void (*release)(struct kref *kref))
{
        WARN_ON(release == NULL);

        if (atomic_sub_and_test((int) count, &kref->refcount)) {
                release(kref);
                return 1;
        }
        return 0;
}
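/*
 * Usage sketch, not part of kref.h: a release callback receives the
 * embedded kref and recovers the enclosing object with container_of();
 * it does the actual freeing itself, which is why passing bare kfree is
 * not acceptable.  my_data_release() continues the hypothetical
 * struct my_data example above.
 */
static void my_data_release(struct kref *kref)
{
        struct my_data *d = container_of(kref, struct my_data, refcount);

        kfree(d);
}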
/**
 * kref_put - decrement refcount for object.
 * @kref: object.
 * @release: pointer to the function that will clean up the object when the
 *           last reference to the object is released.  The same rules
 *           apply as for kref_sub(): the pointer is required, and passing
 *           kfree will get you publicly mocked mercilessly by the kref
 *           maintainer.
 *
 * Decrement the refcount, and if it drops to 0, call release().
 * Return 1 if the object was removed, otherwise return 0.  As with
 * kref_sub(), a return of 0 does not mean the kref is still in memory;
 * only use the return value to check whether the kref is now gone.
 */
static inline int kref_put(struct kref *kref, void (*release)(struct kref *kref))
{
        return kref_sub(kref, 1, release);
}
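/*
 * Usage sketch, not part of kref.h: every kref_get() is paired with a
 * kref_put() naming the release callback, and after the final put the
 * object must not be touched.  my_data_borrow() is hypothetical; it
 * assumes the caller already holds its own reference, which is what
 * makes the plain kref_get() safe here.
 */
static void my_data_borrow(struct my_data *d)
{
        kref_get(&d->refcount);   /* take our own reference */
        /* ... use d, possibly handing it to another context ... */
        kref_put(&d->refcount, my_data_release);  /* drop it; d may be gone now */
}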
/**
 * kref_put_spinlock_irqsave - decrement refcount for object.
 * @kref: object.
 * @release: pointer to the function that will clean up the object when the
 *           last reference to the object is released.
 * @lock: lock to take on the final put.
 *
 * Behaves identically to kref_put() with one exception: if the reference
 * count drops to zero, @lock is taken atomically with respect to dropping
 * the count.  The release function is called with @lock held and has to
 * drop it with spin_unlock(), without _irqrestore.
 */
static inline int kref_put_spinlock_irqsave(struct kref *kref,
                void (*release)(struct kref *kref),
                spinlock_t *lock)
{
        unsigned long flags;

        WARN_ON(release == NULL);
        if (atomic_add_unless(&kref->refcount, -1, 1))
                return 0;
        spin_lock_irqsave(lock, flags);
        if (atomic_dec_and_test(&kref->refcount)) {
                release(kref);
                local_irq_restore(flags);
                return 1;
        }
        spin_unlock_irqrestore(lock, flags);
        return 0;
}
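/*
 * Usage sketch, not part of kref.h: with kref_put_spinlock_irqsave() the
 * release callback runs under @lock with interrupts disabled, so it must
 * drop the lock itself with plain spin_unlock(); the saved irq state is
 * restored by kref_put_spinlock_irqsave() afterwards.  my_list_lock and
 * my_data_release_locked() are hypothetical.
 */
static DEFINE_SPINLOCK(my_list_lock);

static void my_data_release_locked(struct kref *kref)
{
        struct my_data *d = container_of(kref, struct my_data, refcount);

        list_del(&d->node);            /* unlink while my_list_lock is held */
        spin_unlock(&my_list_lock);    /* plain unlock, no _irqrestore */
        kfree(d);
}

/* callers: kref_put_spinlock_irqsave(&d->refcount, my_data_release_locked, &my_list_lock); */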
/**
 * kref_put_mutex - decrement refcount for object.
 * @kref: object.
 * @release: pointer to the function that will clean up the object when the
 *           last reference to the object is released.
 * @lock: mutex to take on the final put.
 *
 * Like kref_put(), but if the reference count drops to zero, @lock is
 * taken before release() is called, so release() runs with the mutex held
 * and must drop it itself.
 */
static inline int kref_put_mutex(struct kref *kref,
                                 void (*release)(struct kref *kref),
                                 struct mutex *lock)
{
        WARN_ON(release == NULL);
        if (unlikely(!atomic_add_unless(&kref->refcount, -1, 1))) {
                mutex_lock(lock);
                if (unlikely(!atomic_dec_and_test(&kref->refcount))) {
                        mutex_unlock(lock);
                        return 0;
                }
                release(kref);
                return 1;
        }
        return 0;
}
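/*
 * Usage sketch, not part of kref.h: since kref_put_mutex() calls
 * release() with the mutex still held, the callback unlinks the object
 * and then drops the mutex itself before freeing.  my_list_mutex and
 * my_data_release_mutex() are hypothetical.
 */
static DEFINE_MUTEX(my_list_mutex);

static void my_data_release_mutex(struct kref *kref)
{
        struct my_data *d = container_of(kref, struct my_data, refcount);

        list_del(&d->node);
        mutex_unlock(&my_list_mutex);
        kfree(d);
}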
/**
 * kref_get_unless_zero - increment refcount for object unless it is zero.
 * @kref: object.
 *
 * Return non-zero if the increment succeeded, otherwise return 0.
 *
 * This simplifies locking for objects that are found through a lookup
 * structure and removed from it in the object destructor: a lookup
 * followed by kref_get_unless_zero() *with a return value check* lets
 * locking on the kref_put() path be deferred to the actual removal from
 * the lookup structure, and makes RCU lookups trivial.
 */
static inline int __must_check kref_get_unless_zero(struct kref *kref)
{
        return atomic_add_unless(&kref->refcount, 1, 0);
}
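/*
 * Usage sketch, not part of kref.h: an RCU lookup that can race with the
 * destructor only hands out the object if kref_get_unless_zero()
 * succeeds; a failed increment means the object is already on its way
 * out.  my_list and my_data_lookup() are hypothetical; assumes
 * <linux/rculist.h>.
 */
static LIST_HEAD(my_list);

static struct my_data *my_data_lookup(int key)
{
        struct my_data *d;

        rcu_read_lock();
        list_for_each_entry_rcu(d, &my_list, node) {
                if (d->key == key && kref_get_unless_zero(&d->refcount)) {
                        rcu_read_unlock();
                        return d;  /* caller now holds a reference */
                }
        }
        rcu_read_unlock();
        return NULL;
}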