@@ -20,6 +20,11 @@
 #define DEFINE_PER_CPU(type, name) \
     __attribute__((__section__(".data.percpu"))) __typeof__(type) per_cpu__##name
 
+#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name)		\
+	__attribute__((__section__(".data.percpu.shared_aligned"))) \
+	__typeof__(type) per_cpu__##name			\
+	____cacheline_aligned_in_smp
+
 /* var is in discarded region: offset to particular copy we want */
 #define per_cpu(var, cpu) (*RELOC_HIDE(&per_cpu__##var, __per_cpu_offset(cpu)))
 #define __get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __my_cpu_offset()))
@@ -40,6 +45,8 @@ extern void setup_per_cpu_areas(void);
 
 #define DEFINE_PER_CPU(type, name) \
     __typeof__(type) per_cpu__##name
+#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name)	\
+	DEFINE_PER_CPU(type, name)
 
 #define per_cpu(var, cpu)			(*((void)(cpu), &per_cpu__##var))
 #define __get_cpu_var(var)			per_cpu__##var
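
For reference, a minimal usage sketch of the new interface (not part of the patch): the struct, variable, and function names below are hypothetical, while DEFINE_PER_CPU_SHARED_ALIGNED, get_cpu_var()/put_cpu_var(), per_cpu(), and for_each_possible_cpu() are the existing per-CPU APIs. On SMP builds the variable is placed in .data.percpu.shared_aligned and cacheline-aligned to avoid false sharing; on UP builds the macro falls back to a plain DEFINE_PER_CPU.

#include <linux/percpu.h>
#include <linux/cpumask.h>

/* hypothetical per-CPU statistics; names are illustrative only */
struct cpu_stats {
	unsigned long nr_events;
};

/* cacheline-aligned on SMP so hot per-CPU writes do not false-share */
static DEFINE_PER_CPU_SHARED_ALIGNED(struct cpu_stats, cpu_stats);

static void count_event(void)
{
	/* get_cpu_var() disables preemption while we touch this CPU's copy */
	get_cpu_var(cpu_stats).nr_events++;
	put_cpu_var(cpu_stats);
}

static unsigned long total_events(void)
{
	unsigned long sum = 0;
	int cpu;

	/* per_cpu(var, cpu) addresses a specific CPU's copy */
	for_each_possible_cpu(cpu)
		sum += per_cpu(cpu_stats, cpu).nr_events;
	return sum;
}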