/*
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_BITOPS_H
#define __ASM_AVR32_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/byteorder.h>

/*
 * clear_bit() doesn't provide any barrier for the compiler
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()
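
/*
 * Illustrative sketch, not from the original header: releasing a
 * lock-like bit.  The barrier keeps stores inside the critical section
 * from being reordered past the clear.  MY_LOCK_BIT and flags are
 * assumed names.
 *
 *	smp_mb__before_clear_bit();
 *	clear_bit(MY_LOCK_BIT, &flags);
 */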

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long tmp;

	/*
	 * "ssrf 5" sets the status register L (lock) flag; an interrupt
	 * clears it again, making stcond fail and the sequence retry,
	 * so the read-modify-write below is atomic.  All the atomic
	 * routines in this file use the same pattern.
	 */
	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	sbr	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "cc");
	} else {
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	or	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "r"(mask)
			: "cc");
	}
}
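
/*
 * Illustrative usage sketch with assumed names: my_flags is a
 * caller-provided bitmap.  Bit 35 lands in the second word, which
 * set_bit() reaches via the nr / BITS_PER_LONG word offset above.
 *
 *	static unsigned long my_flags[2];
 *
 *	set_bit(35, my_flags);
 */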

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long tmp;

	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	cbr	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "cc");
	} else {
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	andn	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "r"(mask)
			: "cc");
	}
}
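
/*
 * Illustrative sketch with assumed names: clearing an ordinary status
 * flag needs no extra barriers; only lock-style uses need the
 * smp_mb__*_clear_bit() calls shown earlier.
 *
 *	clear_bit(RX_PENDING, &priv->status);
 */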

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp;

	asm volatile(
		"1:	ssrf	5\n"
		"	ld.w	%0, %2\n"
		"	eor	%0, %3\n"
		"	stcond	%1, %0\n"
		"	brne	1b"
		: "=&r"(tmp), "=o"(*p)
		: "m"(*p), "r"(mask)
		: "cc");
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp, old;

	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %3\n"
			"	mov	%2, %0\n"
			"	sbr	%0, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "memory", "cc");
	} else {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%2, %3\n"
			"	or	%0, %2, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "r"(mask)
			: "memory", "cc");
	}

	return (old & mask) != 0;
}
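
/*
 * Illustrative sketch, not part of the original API: a minimal
 * test-and-set spin on a single lock bit.  LOCK_BIT and lock_word are
 * assumed names.
 *
 *	while (test_and_set_bit(LOCK_BIT, &lock_word))
 *		cpu_relax();
 *	... critical section ...
 *	smp_mb__before_clear_bit();
 *	clear_bit(LOCK_BIT, &lock_word);
 */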

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp, old;

	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %3\n"
			"	mov	%2, %0\n"
			"	cbr	%0, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "memory", "cc");
	} else {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %3\n"
			"	mov	%2, %0\n"
			"	andn	%0, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "r"(mask)
			: "memory", "cc");
	}

	return (old & mask) != 0;
}
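
/*
 * Illustrative sketch with assumed names: consuming a "work pending"
 * flag exactly once, even when the setter runs from interrupt context.
 *
 *	if (test_and_clear_bit(WORK_PENDING, &state))
 *		do_the_work();
 */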

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp, old;

	asm volatile(
		"1:	ssrf	5\n"
		"	ld.w	%2, %3\n"
		"	eor	%0, %2, %4\n"
		"	stcond	%1, %0\n"
		"	brne	1b"
		: "=&r"(tmp), "=o"(*p), "=&r"(old)
		: "m"(*p), "r"(mask)
		: "memory", "cc");

	return (old & mask) != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/* Find First bit Set */
static inline unsigned long __ffs(unsigned long word)
{
	unsigned long result;

	/*
	 * Reverse the bit order, then count leading zeros; the result
	 * is undefined if no bit is set.
	 */
	asm("brev %1\n\t"
	    "clz %0,%1"
	    : "=r"(result), "=&r"(word)
	    : "1"(word));
	return result;
}

/* Find First Zero */
static inline unsigned long ffz(unsigned long word)
{
	return __ffs(~word);
}
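
/*
 * Worked examples (illustrative): __ffs(0x18) == 3, since bit 3 is the
 * lowest bit set in 0x18, and ffz(0x07) == 3, since bit 3 is the lowest
 * bit clear.  Both results are undefined when no such bit exists.
 */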

/* Find Last bit Set */
static inline int fls(unsigned long word)
{
	unsigned long result;

	/* clz returns 32 for a zero operand, so fls(0) == 0 as required. */
	asm("clz %0,%1" : "=r"(result) : "r"(word));
	return 32 - result;
}

static inline int __fls(unsigned long word)
{
	return fls(word) - 1;
}
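
/*
 * Worked examples (illustrative): fls(0x08) == 4 (the highest set bit,
 * numbered from 1), fls(0) == 0, and __fls(0x08) == 3 (the same bit
 * numbered from 0; undefined for 0).
 */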

unsigned long find_first_zero_bit(const unsigned long *addr,
				  unsigned long size);
#define find_first_zero_bit find_first_zero_bit

unsigned long find_next_zero_bit(const unsigned long *addr,
				 unsigned long size,
				 unsigned long offset);
#define find_next_zero_bit find_next_zero_bit

unsigned long find_first_bit(const unsigned long *addr,
			     unsigned long size);
#define find_first_bit find_first_bit

unsigned long find_next_bit(const unsigned long *addr,
			    unsigned long size,
			    unsigned long offset);
#define find_next_bit find_next_bit
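
/*
 * Illustrative sketch, not from the original header: walking every set
 * bit of a bitmap with the declarations above.  map, NBITS and handle()
 * are assumed names.
 *
 *	unsigned long bit;
 *
 *	for (bit = find_first_bit(map, NBITS);
 *	     bit < NBITS;
 *	     bit = find_next_bit(map, NBITS, bit + 1))
 *		handle(bit);
 */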

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 *
 * The difference is that bit numbering starts at 1, and if no bit is set,
 * the function returns 0.
 */
static inline int ffs(unsigned long word)
{
	if (word == 0)
		return 0;
	return __ffs(word) + 1;
}
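
/*
 * Worked example (illustrative): ffs(0x18) == 4 while __ffs(0x18) == 3,
 * and ffs(0) == 0 where __ffs(0) is undefined.
 */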

#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

extern unsigned long find_next_zero_bit_le(const void *addr,
		unsigned long size, unsigned long offset);
#define find_next_zero_bit_le find_next_zero_bit_le

extern unsigned long find_next_bit_le(const void *addr,
		unsigned long size, unsigned long offset);
#define find_next_bit_le find_next_bit_le

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __ASM_AVR32_BITOPS_H */