/* percpu.h — revision 9939ddaff52787b2a7c1adf1b2afc95421aa0884 */
1#ifndef _ASM_X86_PERCPU_H 2#define _ASM_X86_PERCPU_H 3 4#ifdef CONFIG_X86_64 5#define __percpu_seg gs 6#define __percpu_mov_op movq 7#else 8#define __percpu_seg fs 9#define __percpu_mov_op movl 10#endif 11 12#ifdef __ASSEMBLY__ 13 14/* 15 * PER_CPU finds an address of a per-cpu variable. 16 * 17 * Args: 18 * var - variable name 19 * reg - 32bit register 20 * 21 * The resulting address is stored in the "reg" argument. 22 * 23 * Example: 24 * PER_CPU(cpu_gdt_descr, %ebx) 25 */ 26#ifdef CONFIG_SMP 27#define PER_CPU(var, reg) \ 28 __percpu_mov_op %__percpu_seg:per_cpu__this_cpu_off, reg; \ 29 lea per_cpu__##var(reg), reg 30#define PER_CPU_VAR(var) %__percpu_seg:per_cpu__##var 31#else /* ! SMP */ 32#define PER_CPU(var, reg) \ 33 __percpu_mov_op $per_cpu__##var, reg 34#define PER_CPU_VAR(var) per_cpu__##var 35#endif /* SMP */ 36 37#else /* ...!ASSEMBLY */ 38 39#include <linux/stringify.h> 40 41#ifdef CONFIG_SMP 42#define __percpu_seg_str "%%"__stringify(__percpu_seg)":" 43#define __my_cpu_offset x86_read_percpu(this_cpu_off) 44#else 45#define __percpu_seg_str 46#endif 47 48#include <asm-generic/percpu.h> 49 50/* We can use this directly for local CPU (faster). */ 51DECLARE_PER_CPU(unsigned long, this_cpu_off); 52 53/* For arch-specific code, we can use direct single-insn ops (they 54 * don't give an lvalue though). 
*/ 55extern void __bad_percpu_size(void); 56 57#define percpu_to_op(op, var, val) \ 58do { \ 59 typedef typeof(var) T__; \ 60 if (0) { \ 61 T__ tmp__; \ 62 tmp__ = (val); \ 63 } \ 64 switch (sizeof(var)) { \ 65 case 1: \ 66 asm(op "b %1,"__percpu_seg_str"%0" \ 67 : "+m" (var) \ 68 : "ri" ((T__)val)); \ 69 break; \ 70 case 2: \ 71 asm(op "w %1,"__percpu_seg_str"%0" \ 72 : "+m" (var) \ 73 : "ri" ((T__)val)); \ 74 break; \ 75 case 4: \ 76 asm(op "l %1,"__percpu_seg_str"%0" \ 77 : "+m" (var) \ 78 : "ri" ((T__)val)); \ 79 break; \ 80 case 8: \ 81 asm(op "q %1,"__percpu_seg_str"%0" \ 82 : "+m" (var) \ 83 : "r" ((T__)val)); \ 84 break; \ 85 default: __bad_percpu_size(); \ 86 } \ 87} while (0) 88 89#define percpu_from_op(op, var) \ 90({ \ 91 typeof(var) ret__; \ 92 switch (sizeof(var)) { \ 93 case 1: \ 94 asm(op "b "__percpu_seg_str"%1,%0" \ 95 : "=r" (ret__) \ 96 : "m" (var)); \ 97 break; \ 98 case 2: \ 99 asm(op "w "__percpu_seg_str"%1,%0" \ 100 : "=r" (ret__) \ 101 : "m" (var)); \ 102 break; \ 103 case 4: \ 104 asm(op "l "__percpu_seg_str"%1,%0" \ 105 : "=r" (ret__) \ 106 : "m" (var)); \ 107 break; \ 108 case 8: \ 109 asm(op "q "__percpu_seg_str"%1,%0" \ 110 : "=r" (ret__) \ 111 : "m" (var)); \ 112 break; \ 113 default: __bad_percpu_size(); \ 114 } \ 115 ret__; \ 116}) 117 118#define x86_read_percpu(var) percpu_from_op("mov", per_cpu__##var) 119#define x86_write_percpu(var, val) percpu_to_op("mov", per_cpu__##var, val) 120#define x86_add_percpu(var, val) percpu_to_op("add", per_cpu__##var, val) 121#define x86_sub_percpu(var, val) percpu_to_op("sub", per_cpu__##var, val) 122#define x86_or_percpu(var, val) percpu_to_op("or", per_cpu__##var, val) 123 124#ifdef CONFIG_X86_64 125extern void load_pda_offset(int cpu); 126#else 127static inline void load_pda_offset(int cpu) { } 128#endif 129 130#endif /* !__ASSEMBLY__ */ 131 132#ifdef CONFIG_SMP 133 134/* 135 * Define the "EARLY_PER_CPU" macros. 
These are used for some per_cpu 136 * variables that are initialized and accessed before there are per_cpu 137 * areas allocated. 138 */ 139 140#define DEFINE_EARLY_PER_CPU(_type, _name, _initvalue) \ 141 DEFINE_PER_CPU(_type, _name) = _initvalue; \ 142 __typeof__(_type) _name##_early_map[NR_CPUS] __initdata = \ 143 { [0 ... NR_CPUS-1] = _initvalue }; \ 144 __typeof__(_type) *_name##_early_ptr __refdata = _name##_early_map 145 146#define EXPORT_EARLY_PER_CPU_SYMBOL(_name) \ 147 EXPORT_PER_CPU_SYMBOL(_name) 148 149#define DECLARE_EARLY_PER_CPU(_type, _name) \ 150 DECLARE_PER_CPU(_type, _name); \ 151 extern __typeof__(_type) *_name##_early_ptr; \ 152 extern __typeof__(_type) _name##_early_map[] 153 154#define early_per_cpu_ptr(_name) (_name##_early_ptr) 155#define early_per_cpu_map(_name, _idx) (_name##_early_map[_idx]) 156#define early_per_cpu(_name, _cpu) \ 157 *(early_per_cpu_ptr(_name) ? \ 158 &early_per_cpu_ptr(_name)[_cpu] : \ 159 &per_cpu(_name, _cpu)) 160 161#else /* !CONFIG_SMP */ 162#define DEFINE_EARLY_PER_CPU(_type, _name, _initvalue) \ 163 DEFINE_PER_CPU(_type, _name) = _initvalue 164 165#define EXPORT_EARLY_PER_CPU_SYMBOL(_name) \ 166 EXPORT_PER_CPU_SYMBOL(_name) 167 168#define DECLARE_EARLY_PER_CPU(_type, _name) \ 169 DECLARE_PER_CPU(_type, _name) 170 171#define early_per_cpu(_name, _cpu) per_cpu(_name, _cpu) 172#define early_per_cpu_ptr(_name) NULL 173/* no early_per_cpu_map() */ 174 175#endif /* !CONFIG_SMP */ 176 177#endif /* _ASM_X86_PERCPU_H */ 178