@@ -167,11 +167,25 @@ extern unsigned int user_debug;
                                     : : "r" (0) : "memory")
 #define dmb() __asm__ __volatile__ ("" : : : "memory")
 #endif
-#define mb() dmb()
-#define rmb() mb()
-#define wmb() mb()
-#define read_barrier_depends() do { } while(0)
-#define set_mb(var, value) do { var = value; mb(); } while (0)
+
+#define mb() barrier()
+#define rmb() barrier()
+#define wmb() barrier()
+#define read_barrier_depends() do { } while(0)
+
+#ifdef CONFIG_SMP
+#define smp_mb() dmb()
+#define smp_rmb() dmb()
+#define smp_wmb() dmb()
+#define smp_read_barrier_depends() read_barrier_depends()
+#else
+#define smp_mb() barrier()
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
+#define smp_read_barrier_depends() read_barrier_depends()
+#endif /* CONFIG_SMP */
+
+#define set_mb(var, value) do { var = value; smp_mb(); } while (0)
 #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
 
 extern unsigned long cr_no_alignment; /* defined in entry-armv.S */
@@ -243,22 +257,6 @@ static inline void sched_cacheflush(void)
 {
 }
 
-#ifdef CONFIG_SMP
-
-#define smp_mb() mb()
-#define smp_rmb() rmb()
-#define smp_wmb() wmb()
-#define smp_read_barrier_depends() read_barrier_depends()
-
-#else
-
-#define smp_mb() barrier()
-#define smp_rmb() barrier()
-#define smp_wmb() barrier()
-#define smp_read_barrier_depends() do { } while(0)
-
-#endif /* CONFIG_SMP */
-
 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
 /*
  * On the StrongARM, "swp" is terminally broken since it bypasses the
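
Not part of the patch: a minimal user-space sketch of the producer/consumer
pairing these macros exist to serve, showing why the hunks above can map the
smp_*() barriers to dmb() only under CONFIG_SMP and to plain compiler
barriers otherwise. The barrier()/smp_wmb()/smp_rmb() definitions below are
local stand-ins mirroring the patched header (the CP15 c7, c10, 5 encoding is
the ARMv6 data memory barrier used above, so the CONFIG_SMP path assumes an
ARMv6 target); produce(), consume(), flag and payload are hypothetical names
for illustration only.

/* Illustrative sketch only -- not the kernel's implementation. */
#include <stdio.h>

#define barrier() __asm__ __volatile__ ("" : : : "memory")

#ifdef CONFIG_SMP
/* On SMP, ordering must be visible to the other CPU, so a real data
 * memory barrier is needed (CP15 c7, c10, 5 on ARMv6, as in the patch). */
#define smp_wmb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
                                         : : "r" (0) : "memory")
#define smp_rmb() smp_wmb()
#else
/* On UP, a compiler barrier suffices: a single CPU always observes its
 * own stores in program order, which is why the patch demotes the
 * !CONFIG_SMP variants to barrier(). */
#define smp_wmb() barrier()
#define smp_rmb() barrier()
#endif

static int payload;        /* data published by the producer */
static volatile int flag;  /* set once payload is valid */

void produce(int value)
{
    payload = value;
    smp_wmb();             /* order the payload store before the flag store */
    flag = 1;
}

int consume(void)
{
    while (!flag)
        ;                  /* spin until the producer publishes */
    smp_rmb();             /* order the flag load before the payload load */
    return payload;
}

int main(void)
{
    produce(42);
    printf("%d\n", consume());  /* single-threaded demo: prints 42 */
    return 0;
}

The same reasoning explains the set_mb() change in the first hunk: storing a
variable and then ordering it against later accesses only needs a barrier as
strong as the system requires, so it now uses smp_mb() rather than mb().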