@@ -23,72 +23,71 @@
  * simply bail out immediately through the slow path where the lock will be
  * reattempted until it succeeds.
  */
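
(Aside, not part of the patch: the usual ARMv6 ldrex/strex idiom loops
until the store-exclusive succeeds, whereas the fastpath below makes a
single attempt and punts to fail_fn() on any failure.  For contrast, a
minimal sketch of the looping idiom, modelled on the arch/arm atomic
helpers; the name example_atomic_dec is made up for illustration:

	static inline void example_atomic_dec(atomic_t *v)
	{
		unsigned long tmp;
		int result;

		__asm__ __volatile__(
		"1:	ldrex	%0, [%2]\n"	/* load-exclusive */
		"	sub	%0, %0, #1\n"
		"	strex	%1, %0, [%2]\n"	/* %1 = 0 iff the store succeeded */
		"	teq	%1, #0\n"
		"	bne	1b"		/* reservation lost: retry */
		: "=&r" (result), "=&r" (tmp)
		: "r" (&v->counter)
		: "cc");
	}

Bailing out instead of looping keeps the fastpath short, since the slow
path will retry the lock anyway.)
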
-#define __mutex_fastpath_lock(count, fail_fn)				\
-do {									\
-	int __ex_flag, __res;						\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%2]	\n"				\
-		"sub	%0, %0, #1	\n"				\
-		"strex	%1, %0, [%2]	\n"				\
-									\
-		: "=&r" (__res), "=&r" (__ex_flag)			\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	if (unlikely(__res || __ex_flag))				\
-		fail_fn(count);						\
-} while (0)
-
-#define __mutex_fastpath_lock_retval(count, fail_fn)			\
-({									\
-	int __ex_flag, __res;						\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall int (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%2]	\n"				\
-		"sub	%0, %0, #1	\n"				\
-		"strex	%1, %0, [%2]	\n"				\
-									\
-		: "=&r" (__res), "=&r" (__ex_flag)			\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	__res |= __ex_flag;						\
-	if (unlikely(__res != 0))					\
-		__res = fail_fn(count);					\
-	__res;								\
-})
+static inline void
+__mutex_fastpath_lock(atomic_t *count, fastcall void (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res;
+
+	__asm__ (
+
+		"ldrex	%0, [%2]	\n\t"
+		"sub	%0, %0, #1	\n\t"
+		"strex	%1, %0, [%2]	"
+
+		: "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__res |= __ex_flag;
+	if (unlikely(__res != 0))
+		fail_fn(count);
+}
+
+static inline int
+__mutex_fastpath_lock_retval(atomic_t *count, fastcall int (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res;
+
+	__asm__ (
+
+		"ldrex	%0, [%2]	\n\t"
+		"sub	%0, %0, #1	\n\t"
+		"strex	%1, %0, [%2]	"
+
+		: "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__res |= __ex_flag;
+	if (unlikely(__res != 0))
+		__res = fail_fn(count);
+	return __res;
+}
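
(For context: these hooks are called from the generic mutex code, so the
explicit typecheck()/typecheck_fn() the macros needed is now enforced by
the C prototypes themselves.  Roughly how kernel/mutex.c of this era
invokes the lock hook, quoted from memory and shown only as a hedged
illustration:

	void fastcall __sched mutex_lock(struct mutex *lock)
	{
		might_sleep();
		__mutex_fastpath_lock(&lock->count, __mutex_lock_slowpath);
	}

With inline functions, a mismatched fail_fn type is a compile error at
the call site rather than something typecheck_fn() has to catch.)
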
 
 /*
  * Same trick is used for the unlock fast path. However the original value,
  * rather than the result, is used to test for success in order to have
  * better generated assembly.
  */
-#define __mutex_fastpath_unlock(count, fail_fn)			\
-do {									\
-	int __ex_flag, __res, __orig;					\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%3]	\n"				\
-		"add	%1, %0, #1	\n"				\
-		"strex	%2, %1, [%3]	\n"				\
-									\
-		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)	\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	if (unlikely(__orig || __ex_flag))				\
-		fail_fn(count);						\
-} while (0)
+static inline void
+__mutex_fastpath_unlock(atomic_t *count, fastcall void (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res, __orig;
+
+	__asm__ (
+
+		"ldrex	%0, [%3]	\n\t"
+		"add	%1, %0, #1	\n\t"
+		"strex	%2, %1, [%3]	"
+
+		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__orig |= __ex_flag;
+	if (unlikely(__orig != 0))
+		fail_fn(count);
+}
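
(Why testing the original value helps: __orig is nonzero when the mutex
was contended, and or-ing __ex_flag into it lets both failure conditions
collapse into one flag-setting instruction.  A plausible instruction
sequence, invented here purely to illustrate the idea:

	orrs	r0, r0, r1	@ __orig |= __ex_flag, sets Z
	blne	fail_fn		@ slow path if either was nonzero

Testing the incremented result instead would cost an extra compare
against #1 before the conditional branch.)
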
 
 /*
  * If the unlock was done on a contended lock, or if the unlock simply fails
@@ -110,12 +109,12 @@ __mutex_fastpath_trylock(atomic_t *count, int (*fail_fn)(atomic_t *))
 
 	__asm__ (
 
-		"1:	ldrex	%0, [%3]	\n"
-		"	subs	%1, %0, #1	\n"
-		"	strexeq	%2, %1, [%3]	\n"
-		"	movlt	%0, #0		\n"
-		"	cmpeq	%2, #0		\n"
-		"	bgt	1b		\n"
+		"1:	ldrex	%0, [%3]	\n\t"
+		"	subs	%1, %0, #1	\n\t"
+		"	strexeq	%2, %1, [%3]	\n\t"
+		"	movlt	%0, #0		\n\t"
+		"	cmpeq	%2, #0		\n\t"
+		"	bgt	1b		"
 
 		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)
 		: "r" (&count->counter)
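
(Unlike the lock/unlock fastpaths above, the trylock sequence does loop:
"bgt 1b" is taken only when the count was 1 but the strexeq lost its
exclusive reservation, so a transient failure is never misreported as
"already locked".  The same semantics expressed with the kernel's
atomic_cmpxchg() helper, as an illustrative sketch only; the name
example_trylock is made up:

	static inline int example_trylock(atomic_t *count)
	{
		for (;;) {
			int orig = atomic_read(count);

			if (orig != 1)
				return 0;	/* held or contended: give up */
			if (atomic_cmpxchg(count, 1, 0) == 1)
				return 1;	/* got the lock */
			/* raced with another CPU: retry, like "bgt 1b" */
		}
	}

Returning nonzero on success is what the generic mutex code expects from
__mutex_fastpath_trylock().)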