@@ -42,26 +42,7 @@
 */
#ifdef CONFIG_SMP

-/* This is used for other cpus to find our section. */
-extern unsigned long __per_cpu_offset[];
-
-#define per_cpu_offset(x) (__per_cpu_offset[x])
-
-#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name
-/* We can use this directly for local CPU (faster). */
-DECLARE_PER_CPU(unsigned long, this_cpu_off);
-
-/* var is in discarded region: offset to particular copy we want */
-#define per_cpu(var, cpu) (*({ \
-	extern int simple_indentifier_##var(void); \
-	RELOC_HIDE(&per_cpu__##var, __per_cpu_offset[cpu]); }))
-
-#define __raw_get_cpu_var(var) (*({ \
-	extern int simple_indentifier_##var(void); \
-	RELOC_HIDE(&per_cpu__##var, x86_read_percpu(this_cpu_off)); \
-}))
-
-#define __get_cpu_var(var) __raw_get_cpu_var(var)
+#define __my_cpu_offset x86_read_percpu(this_cpu_off)

/* A macro to avoid #include hell... */
#define percpu_modcopy(pcpudst, src, size) \
@@ -74,11 +55,18 @@ do { \

/* fs segment starts at (positive) offset == __per_cpu_offset[cpu] */
#define __percpu_seg "%%fs:"
+
#else /* !SMP */
-#include <asm-generic/percpu.h>
+
#define __percpu_seg ""
+
#endif /* SMP */

+#include <asm-generic/percpu.h>
+
+/* We can use this directly for local CPU (faster). */
+DECLARE_PER_CPU(unsigned long, this_cpu_off);
+
/* For arch-specific code, we can use direct single-insn ops (they
 * don't give an lvalue though). */
extern void __bad_percpu_size(void);
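
Net effect of the two hunks: the hand-rolled per_cpu(), __raw_get_cpu_var() and __get_cpu_var() definitions leave the x86-32 header, which now only supplies the fast __my_cpu_offset (this_cpu_off read through the %fs-based x86_read_percpu()) and then includes asm-generic/percpu.h in both the SMP and !SMP cases. Below is a simplified sketch of the override pattern the generic header relies on; it is illustrative only, not the exact kernel code, and the fallback via raw_smp_processor_id() is shown as an assumption:

/* asm-generic/percpu.h, heavily simplified */
#ifdef CONFIG_SMP
extern unsigned long __per_cpu_offset[];	/* one offset per CPU */
#define per_cpu_offset(cpu)	(__per_cpu_offset[cpu])

/* An arch may predefine a faster way to locate its own copy. */
#ifndef __my_cpu_offset
#define __my_cpu_offset		per_cpu_offset(raw_smp_processor_id())
#endif

/* Common accessors, written once instead of per architecture. */
#define per_cpu(var, cpu)	(*RELOC_HIDE(&per_cpu__##var, per_cpu_offset(cpu)))
#define __raw_get_cpu_var(var)	(*RELOC_HIDE(&per_cpu__##var, __my_cpu_offset))
#define __get_cpu_var(var)	__raw_get_cpu_var(var)
#endif /* CONFIG_SMP */

With x86-32 defining __my_cpu_offset before the include, the generic accessors can expand to the same %fs-relative access the removed open-coded macros used, so the duplicated wrappers move to one shared place.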