#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

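/*
 * ATOMIC_OP(op, c_op) generates atomic_##op(): the read-modify-write on
 * v->counter is made atomic by disabling local interrupts around it, so
 * it is only atomic with respect to code running on this CPU.
 */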
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}

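/*
 * ATOMIC_OP_RETURN(op, c_op) generates atomic_##op##_return(), which applies
 * the operation under the same interrupt masking and returns the new value
 * of the counter.
 */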
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}

#define ATOMIC_OPS(op, c_op) ATOMIC_OP(op, c_op) ATOMIC_OP_RETURN(op, c_op)

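/*
 * Instantiate atomic_add()/atomic_add_return() and
 * atomic_sub()/atomic_sub_return().  A minimal usage sketch:
 *
 *	atomic_t v = ATOMIC_INIT(0);
 *
 *	atomic_add(5, &v);
 *	if (atomic_sub_return(5, &v) == 0)
 *		...		(counter dropped back to zero)
 */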
ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

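/* Atomically clear the bits in @mask from v->counter, again by masking IRQs. */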
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter &= ~mask;
	raw_local_irq_restore(flags);
}

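/* Atomically set the bits in @mask in v->counter. */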
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter |= mask;
	raw_local_irq_restore(flags);
}

#endif /* __ASM_SH_ATOMIC_IRQ_H */