cache.h revision 96f1050d3df105c9ae6c6ac224f370199ea82fcd
1/*
2 * Copyright 2004-2009 Analog Devices Inc.
3 *
4 * Licensed under the GPL-2 or later.
5 */
6
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

#ifdef CONFIG_SMP
/*
 * NOTE(review): defined empty under SMP — presumably to suppress the
 * generic <linux/cache.h> alignment attribute on SMP builds; confirm
 * against the generic header before relying on this.
 */
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline_aligned data to L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
/* Align to a cache line AND place in the dedicated L1 data section. */
#define __cacheline_aligned				\
	  __attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif

/*
 * largest L1 which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5
38
/*
 * SMP without hardware cache coherency: advertise which caches need
 * software cross-core synchronization.  Each flag is set only when the
 * corresponding cache (L1 or L2) is actually enabled, so coherent or
 * uncached configurations pay nothing.
 */
#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
#ifndef __ASSEMBLY__
/* Barrier bookkeeping primitives implemented in assembly elsewhere. */
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

/*
 * Record a barrier point on the current core.  Thin C wrapper over the
 * asm implementation; see __raw_smp_mark_barrier_asm for the actual
 * bookkeeping semantics.
 */
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
/*
 * Check/act on a previously marked barrier on the current core.  Thin C
 * wrapper over the asm implementation; see __raw_smp_check_barrier_asm.
 */
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}
59
/* Re-synchronize this core's data/instruction cache with memory
 * (needed because this SMP configuration is not cache-coherent). */
void resync_core_dcache(void);
void resync_core_icache(void);
#endif /* !__ASSEMBLY__ */
#endif /* CONFIG_SMP && !CONFIG_BFIN_CACHE_COHERENT */


#endif /* __ARCH_BLACKFIN_CACHE_H */
67