@@ -31,12 +31,12 @@ static inline bool cpu_has_ht_siblings(void)
 	return has_siblings;
 }
 
-DECLARE_PER_CPU(cpumask_var_t, cpu_sibling_map);
-DECLARE_PER_CPU(cpumask_var_t, cpu_core_map);
+DECLARE_PER_CPU_READ_MOSTLY(cpumask_var_t, cpu_sibling_map);
+DECLARE_PER_CPU_READ_MOSTLY(cpumask_var_t, cpu_core_map);
 /* cpus sharing the last level cache: */
-DECLARE_PER_CPU(cpumask_var_t, cpu_llc_shared_map);
-DECLARE_PER_CPU(u16, cpu_llc_id);
-DECLARE_PER_CPU(int, cpu_number);
+DECLARE_PER_CPU_READ_MOSTLY(cpumask_var_t, cpu_llc_shared_map);
+DECLARE_PER_CPU_READ_MOSTLY(u16, cpu_llc_id);
+DECLARE_PER_CPU_READ_MOSTLY(int, cpu_number);
 
 static inline struct cpumask *cpu_sibling_mask(int cpu)
 {
@@ -53,10 +53,10 @@ static inline struct cpumask *cpu_llc_shared_mask(int cpu)
 	return per_cpu(cpu_llc_shared_map, cpu);
 }
 
-DECLARE_EARLY_PER_CPU(u16, x86_cpu_to_apicid);
-DECLARE_EARLY_PER_CPU(u16, x86_bios_cpu_apicid);
+DECLARE_EARLY_PER_CPU_READ_MOSTLY(u16, x86_cpu_to_apicid);
+DECLARE_EARLY_PER_CPU_READ_MOSTLY(u16, x86_bios_cpu_apicid);
 #if defined(CONFIG_X86_LOCAL_APIC) && defined(CONFIG_X86_32)
-DECLARE_EARLY_PER_CPU(int, x86_cpu_to_logical_apicid);
+DECLARE_EARLY_PER_CPU_READ_MOSTLY(int, x86_cpu_to_logical_apicid);
 #endif
 
 /* Static state in head.S used to set up a CPU */