@@ -46,7 +46,7 @@
 
 #ifdef CONFIG_SMP
 #define __percpu_prefix		"%%"__stringify(__percpu_seg)":"
-#define __my_cpu_offset		percpu_read(this_cpu_off)
+#define __my_cpu_offset		this_cpu_read(this_cpu_off)
 
 /*
  * Compared to the generic __my_cpu_offset version, the following
@@ -351,23 +351,15 @@ do { \
 })
 
 /*
- * percpu_read() makes gcc load the percpu variable every time it is
- * accessed while percpu_read_stable() allows the value to be cached.
- * percpu_read_stable() is more efficient and can be used if its value
+ * this_cpu_read() makes gcc load the percpu variable every time it is
+ * accessed while this_cpu_read_stable() allows the value to be cached.
+ * this_cpu_read_stable() is more efficient and can be used if its value
  * is guaranteed to be valid across cpus. The current users include
  * get_current() and get_thread_info() both of which are actually
  * per-thread variables implemented as per-cpu variables and thus
  * stable for the duration of the respective task.
  */
-#define percpu_read(var)		percpu_from_op("mov", var, "m" (var))
-#define percpu_read_stable(var)		percpu_from_op("mov", var, "p" (&(var)))
-#define percpu_write(var, val)		percpu_to_op("mov", var, val)
-#define percpu_add(var, val)		percpu_add_op(var, val)
-#define percpu_sub(var, val)		percpu_add_op(var, -(val))
-#define percpu_and(var, val)		percpu_to_op("and", var, val)
-#define percpu_or(var, val)		percpu_to_op("or", var, val)
-#define percpu_xor(var, val)		percpu_to_op("xor", var, val)
-#define percpu_inc(var)			percpu_unary_op("inc", var)
+#define this_cpu_read_stable(var)	percpu_from_op("mov", var, "p" (&(var)))
 
 #define __this_cpu_read_1(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
 #define __this_cpu_read_2(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
@@ -512,7 +504,11 @@ static __always_inline int x86_this_cpu_constant_test_bit(unsigned int nr,
 {
 	unsigned long __percpu *a = (unsigned long *)addr + nr / BITS_PER_LONG;
 
-	return ((1UL << (nr % BITS_PER_LONG)) & percpu_read(*a)) != 0;
+#ifdef CONFIG_X86_64
+	return ((1UL << (nr % BITS_PER_LONG)) & __this_cpu_read_8(*a)) != 0;
+#else
+	return ((1UL << (nr % BITS_PER_LONG)) & __this_cpu_read_4(*a)) != 0;
+#endif
 }
 
 static inline int x86_this_cpu_variable_test_bit(int nr,
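
As background for the comment kept above: a minimal sketch, not part of this patch, of how a per-thread accessor can use this_cpu_read_stable(); it assumes the usual DECLARE_PER_CPU(struct task_struct *, current_task) declaration and mirrors the get_current() pattern the comment mentions. A plain this_cpu_read() would force a reload of the variable at every use, whereas the stable variant lets the compiler cache the value, which is safe here because current_task only changes at a context switch.

/*
 * Sketch only (assumed declaration, not introduced by this diff):
 * current_task is per-cpu but stable for the lifetime of the running
 * task, so the compiler may cache the value it reads.
 */
DECLARE_PER_CPU(struct task_struct *, current_task);

static __always_inline struct task_struct *get_current(void)
{
	return this_cpu_read_stable(current_task);
}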