/* xchg and cmpxchg operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 *   Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_CMPXCHG_H
#define _ASM_CMPXCHG_H

#include <linux/types.h>

/*****************************************************************************/
/*
 * exchange value with memory
 */
extern uint64_t __xchg_64(uint64_t i, volatile void *v);

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

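/*
 * The FR-V "swap" instruction atomically exchanges a register with a
 * 32-bit memory word, so the 4-byte case needs no retry loop; any other
 * size traps with a "break" instruction.
 */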
#define xchg(ptr, x)								\
({										\
	__typeof__(ptr) __xg_ptr = (ptr);					\
	__typeof__(*(ptr)) __xg_orig;						\
										\
	switch (sizeof(__xg_orig)) {						\
	case 4:									\
		asm volatile(							\
			"swap%I0 %M0,%1"					\
			: "+m"(*__xg_ptr), "=r"(__xg_orig)			\
			: "1"(x)						\
			: "memory"						\
			);							\
		break;								\
										\
	default:								\
		__xg_orig = (__typeof__(__xg_orig))0;				\
		asm volatile("break");						\
		break;								\
	}									\
										\
	__xg_orig;								\
})

#else

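/*
 * With CONFIG_FRV_OUTOFLINE_ATOMIC_OPS, the operation is performed by an
 * out-of-line helper in the arch's atomic-ops library, trading a call
 * per operation for smaller code at each call site.
 */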
extern uint32_t __xchg_32(uint32_t i, volatile void *v);

#define xchg(ptr, x)										\
({												\
	__typeof__(ptr) __xg_ptr = (ptr);							\
	__typeof__(*(ptr)) __xg_orig;								\
												\
	switch (sizeof(__xg_orig)) {								\
	case 4: __xg_orig = (__typeof__(*(ptr))) __xchg_32((uint32_t)(x), __xg_ptr);	break;	\
	default:										\
		__xg_orig = (__typeof__(__xg_orig))0;						\
		asm volatile("break");								\
		break;										\
	}											\
	__xg_orig;										\
})

#endif

#define tas(ptr) (xchg((ptr), 1))
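
/*
 * Illustrative sketch only: a trivial spin lock could be built on top of
 * tas() and xchg(); "lock" here is a hypothetical variable, not part of
 * this header:
 *
 *	static volatile unsigned long lock;
 *
 *	while (tas(&lock))
 *		;
 *	... critical section ...
 *	xchg(&lock, 0);
 *
 * tas() returns the previous value, so the loop exits once this CPU is
 * the one that flipped the lock from 0 to 1.
 */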

/*****************************************************************************/
/*
 * compare and conditionally exchange value with memory
 * - if (*ptr == test) then orig = *ptr; *ptr = new
 * - if (*ptr != test) then orig = *ptr; *ptr is left unchanged
 */
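/*
 * Sketched in C, cmpxchg(ptr, test, new) behaves as if the following ran
 * in a single atomic step:
 *
 *	orig = *ptr;
 *	if (orig == test)
 *		*ptr = new;
 *	return orig;
 */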
extern uint64_t __cmpxchg_64(uint64_t test, uint64_t new, volatile uint64_t *v);

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

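/*
 * FR-V has no load-locked/store-conditional instructions, so the
 * sequence below emulates them: ICC3.Z and CC3 are primed before the
 * load, the kernel's interrupt and exception paths cancel the condition
 * if anything preempts the sequence, the CST store then only executes
 * while the condition still holds, and the final BEQ sees ICC3.Z still
 * set and loops back to retry.  See Documentation/frv/atomic-ops.txt
 * for the full mechanism.
 */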
#define cmpxchg(ptr, test, new)							\
({										\
	__typeof__(ptr) __xg_ptr = (ptr);					\
	__typeof__(*(ptr)) __xg_orig, __xg_tmp;					\
	__typeof__(*(ptr)) __xg_test = (test);					\
	__typeof__(*(ptr)) __xg_new = (new);					\
										\
	switch (sizeof(__xg_orig)) {						\
	case 4:									\
		asm volatile(							\
			"0:						\n"	\
			"	orcc		gr0,gr0,gr0,icc3	\n"	\
			"	ckeq		icc3,cc7		\n"	\
			"	ld.p		%M0,%1			\n"	\
			"	orcr		cc7,cc7,cc3		\n"	\
			"	sub%I4cc	%1,%4,%2,icc0		\n"	\
			"	bne		icc0,#0,1f		\n"	\
			"	cst.p		%3,%M0		,cc3,#1	\n"	\
			"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	\
			"	beq		icc3,#0,0b		\n"	\
			"1:						\n"	\
			: "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp)	\
			: "r"(__xg_new), "NPr"(__xg_test)			\
			: "memory", "cc7", "cc3", "icc3", "icc0"		\
			);							\
		break;								\
										\
	default:								\
		__xg_orig = (__typeof__(__xg_orig))0;				\
		asm volatile("break");						\
		break;								\
	}									\
										\
	__xg_orig;								\
})

#else

extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);

#define cmpxchg(ptr, test, new)							\
({										\
	__typeof__(ptr) __xg_ptr = (ptr);					\
	__typeof__(*(ptr)) __xg_orig;						\
	__typeof__(*(ptr)) __xg_test = (test);					\
	__typeof__(*(ptr)) __xg_new = (new);					\
										\
	switch (sizeof(__xg_orig)) {						\
	case 4: __xg_orig = (__force __typeof__(*(ptr)))			\
			__cmpxchg_32((__force uint32_t *)__xg_ptr,		\
					 (__force uint32_t)__xg_test,		\
					 (__force uint32_t)__xg_new); break;	\
	default:								\
		__xg_orig = (__typeof__(__xg_orig))0;				\
		asm volatile("break");						\
		break;								\
	}									\
										\
	__xg_orig;								\
})

#endif
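
/*
 * Illustrative sketch only: the usual way to build a read-modify-write
 * operation on top of cmpxchg() is a retry loop ("counter" is a
 * hypothetical variable, not part of this header):
 *
 *	static int counter;
 *	int old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */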

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return cmpxchg((unsigned long *)ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic with respect to the
 * current CPU. Always make them available.
 */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
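
/*
 * Illustrative sketch only: data that only the current CPU modifies (a
 * per-CPU statistics counter, say) can use the cheaper local form;
 * "stat" here is a hypothetical variable:
 *
 *	unsigned long old;
 *
 *	do {
 *		old = stat;
 *	} while (cmpxchg_local(&stat, old, old + 1) != old);
 */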

#endif /* _ASM_CMPXCHG_H */