/*===---- arm_acle.h - ARM Non-Neon intrinsics -----------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __ARM_ACLE_H
#define __ARM_ACLE_H

#ifndef __ARM_ACLE
#error "ACLE intrinsics support not enabled."
#endif

#include <stdint.h>

#if defined(__cplusplus)
extern "C" {
#endif

/* 8 SYNCHRONIZATION, BARRIER AND HINT INTRINSICS */
/* 8.3 Memory barriers */
#if !defined(_MSC_VER)
#define __dmb(i) __builtin_arm_dmb(i)
#define __dsb(i) __builtin_arm_dsb(i)
#define __isb(i) __builtin_arm_isb(i)
#endif
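
/*
 * Illustrative example (not part of ACLE): publish data with a full-system
 * data memory barrier (0xF == SY) between the data store and the flag store.
 *
 *   shared_data = compute();   // hypothetical names
 *   __dmb(0xF);                // data store is observable before the flag
 *   shared_flag = 1;
 */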

/* 8.4 Hints */

#if !defined(_MSC_VER)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __wfi(void) {
  __builtin_arm_wfi();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __wfe(void) {
  __builtin_arm_wfe();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __sev(void) {
  __builtin_arm_sev();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __sevl(void) {
  __builtin_arm_sevl();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __yield(void) {
  __builtin_arm_yield();
}
#endif
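
/*
 * Illustrative example: a low-power wait loop. __wfe() parks the core until
 * an event or interrupt arrives, e.g. a __sev() issued by another core after
 * it updates the (hypothetical) flag.
 *
 *   while (!flag)
 *     __wfe();
 */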

#if __ARM_32BIT_STATE
#define __dbg(t) __builtin_arm_dbg(t)
#endif

/* 8.5 Swap */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __swp(uint32_t x, volatile uint32_t *p) {
  uint32_t v;
  do
    v = __builtin_arm_ldrex(p);
  while (__builtin_arm_strex(x, p));
  return v;
}
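
/*
 * Illustrative sketch: a minimal test-and-set spinlock built on __swp. A
 * real lock would also need memory barriers around the critical section.
 *
 *   static volatile uint32_t lock = 0;
 *   while (__swp(1, &lock) != 0)   // spin until the old value was 0
 *     ;
 *   ... critical section ...
 *   lock = 0;
 */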

/* 8.6 Memory prefetch intrinsics */
/* 8.6.1 Data prefetch */
#define __pld(addr) __pldx(0, 0, 0, addr)

#if __ARM_32BIT_STATE
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, 1)
#else
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, cache_level, retention_policy, 1)
#endif

/* 8.6.2 Instruction prefetch */
#define __pli(addr) __plix(0, 0, addr)

#if __ARM_32BIT_STATE
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, 0)
#else
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, cache_level, retention_policy, 0)
#endif
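
/*
 * Illustrative example: prefetch a buffer element ahead of a streaming
 * write. Per ACLE, access_kind 1 requests a prefetch for writing,
 * cache_level 0 targets L1, and retention_policy 0 asks for normal
 * (temporal) retention.
 *
 *   __pldx(1, 0, 0, &buf[i + 16]);   // hypothetical buffer and stride
 */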

/* 8.7 NOP */
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __nop(void) {
  __builtin_arm_nop();
}

/* 9 DATA-PROCESSING INTRINSICS */
/* 9.2 Miscellaneous data-processing intrinsics */
/* ROR */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __ror(uint32_t x, uint32_t y) {
  y %= 32;
  if (y == 0)
    return x;
  return (x >> y) | (x << (32 - y));
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
  __rorll(uint64_t x, uint32_t y) {
  y %= 64;
  if (y == 0)
    return x;
  return (x >> y) | (x << (64 - y));
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
  __rorl(unsigned long x, uint32_t y) {
#if __SIZEOF_LONG__ == 4
  return __ror(x, y);
#else
  return __rorll(x, y);
#endif
}

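/*
 * Example: __ror(0x80000001U, 4) == 0x18000000. The rotate count is reduced
 * modulo the bit width, so rotating by 0 or by 32 returns x unchanged.
 */
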
/* CLZ */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __clz(uint32_t t) {
  return __builtin_clz(t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
  __clzl(unsigned long t) {
  return __builtin_clzl(t);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
  __clzll(uint64_t t) {
  return __builtin_clzll(t);
}
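
/*
 * Note: these wrap the __builtin_clz* builtins, whose result is undefined
 * for a zero argument, so guard that case where it can occur, e.g.
 *
 *   uint32_t lead = x ? __clz(x) : 32;   // 31 - __clz(x) is the MSB index
 */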

/* REV */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __rev(uint32_t t) {
  return __builtin_bswap32(t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
  __revl(unsigned long t) {
#if __SIZEOF_LONG__ == 4
  return __builtin_bswap32(t);
#else
  return __builtin_bswap64(t);
#endif
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
  __revll(uint64_t t) {
  return __builtin_bswap64(t);
}
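
/*
 * Example: __rev(0x12345678U) == 0x78563412, a full byte reversal, which is
 * also how a 32-bit value is converted between little- and big-endian.
 */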

/* REV16 */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __rev16(uint32_t t) {
  return __ror(__rev(t), 16);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
  __rev16ll(uint64_t t) {
  return (((uint64_t)__rev16(t >> 32)) << 32) | __rev16(t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
  __rev16l(unsigned long t) {
#if __SIZEOF_LONG__ == 4
  return __rev16(t);
#else
  return __rev16ll(t);
#endif
}

/* REVSH */
static __inline__ int16_t __attribute__((__always_inline__, __nodebug__))
  __revsh(int16_t t) {
  return __builtin_bswap16(t);
}
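
/*
 * Examples: __rev16(0x11223344U) == 0x22114433 (bytes swapped within each
 * halfword), and __revsh(0x0080) == (int16_t)0x8000, i.e. negative, since
 * the byte-reversed result is interpreted as a signed 16-bit value.
 */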

/* RBIT */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __rbit(uint32_t t) {
  return __builtin_arm_rbit(t);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
  __rbitll(uint64_t t) {
#if __ARM_32BIT_STATE
  return (((uint64_t)__builtin_arm_rbit(t)) << 32) |
         __builtin_arm_rbit(t >> 32);
#else
  return __builtin_arm_rbit64(t);
#endif
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
  __rbitl(unsigned long t) {
#if __SIZEOF_LONG__ == 4
  return __rbit(t);
#else
  return __rbitll(t);
#endif
}
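
/*
 * Example: __rbit(0x00000001U) == 0x80000000. Combined with __clz this
 * yields a count-trailing-zeros for nonzero x: __clz(__rbit(x)).
 */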

/*
 * 9.4 Saturating intrinsics
 *
 * FIXME: Change the guard to the corresponding __ARM_FEATURE flag once the
 * Q-flag intrinsics are implemented and the flag is enabled.
 */
/* 9.4.1 Width-specified saturation intrinsics */
#if __ARM_32BIT_STATE
#define __ssat(x, y) __builtin_arm_ssat(x, y)
#define __usat(x, y) __builtin_arm_usat(x, y)
#endif
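
/*
 * Examples (the width argument must be a compile-time constant):
 *   __ssat(1000, 8) == 127   // clamped to the signed 8-bit range
 *   __usat(-1, 8)   == 0     // clamped to the unsigned 8-bit range
 */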

/* 9.4.2 Saturating addition and subtraction intrinsics */
#if __ARM_32BIT_STATE
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
  __qadd(int32_t t, int32_t v) {
  return __builtin_arm_qadd(t, v);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
  __qsub(int32_t t, int32_t v) {
  return __builtin_arm_qsub(t, v);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
  __qdbl(int32_t t) {
  return __builtin_arm_qadd(t, t);
}
#endif
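
/*
 * Example: __qadd(INT32_MAX, 1) == INT32_MAX; the result saturates (and the
 * Q flag is set) instead of wrapping. __qdbl(t) is a saturating 2 * t.
 */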

/* 9.7 CRC32 intrinsics */
#if __ARM_FEATURE_CRC32
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32b(uint32_t a, uint8_t b) {
  return __builtin_arm_crc32b(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32h(uint32_t a, uint16_t b) {
  return __builtin_arm_crc32h(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32w(uint32_t a, uint32_t b) {
  return __builtin_arm_crc32w(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32d(uint32_t a, uint64_t b) {
  return __builtin_arm_crc32d(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32cb(uint32_t a, uint8_t b) {
  return __builtin_arm_crc32cb(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32ch(uint32_t a, uint16_t b) {
  return __builtin_arm_crc32ch(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32cw(uint32_t a, uint32_t b) {
  return __builtin_arm_crc32cw(a, b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
  __crc32cd(uint32_t a, uint64_t b) {
  return __builtin_arm_crc32cd(a, b);
}
#endif
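
/*
 * Illustrative sketch: CRC-32 of a byte buffer one byte at a time, using
 * the conventional all-ones initial value and final inversion. The wider
 * variants (__crc32w, __crc32d) process aligned bulk data faster.
 *
 *   uint32_t crc = 0xFFFFFFFFU;
 *   for (size_t i = 0; i != n; ++i)   // hypothetical buf and n
 *     crc = __crc32b(crc, buf[i]);
 *   crc = ~crc;
 */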

/* 10.1 Special register intrinsics */
#define __arm_rsr(sysreg) __builtin_arm_rsr(sysreg)
#define __arm_rsr64(sysreg) __builtin_arm_rsr64(sysreg)
#define __arm_rsrp(sysreg) __builtin_arm_rsrp(sysreg)
#define __arm_wsr(sysreg, v) __builtin_arm_wsr(sysreg, v)
#define __arm_wsr64(sysreg, v) __builtin_arm_wsr64(sysreg, v)
#define __arm_wsrp(sysreg, v) __builtin_arm_wsrp(sysreg, v)
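
/*
 * Illustrative example (AArch64): read the virtual counter. The sysreg
 * argument must be a string literal naming a register the compiler
 * recognizes; which names are accepted is target-specific.
 *
 *   uint64_t ticks = __arm_rsr64("CNTVCT_EL0");
 */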

#if defined(__cplusplus)
}
#endif

#endif /* __ARM_ACLE_H */