local.h

#ifndef _ASM_X86_LOCAL_H
#define _ASM_X86_LOCAL_H

#include <linux/percpu.h>
#include <linux/atomic.h>

#include <asm/asm.h>

typedef struct {
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))
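/*
 * Usage sketch (illustrative, not from this file): local_t is meant
 * for per-CPU counters, so it is normally wrapped in a per-CPU
 * variable.  The name "hits" below is hypothetical.
 *
 *      static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *      long n = local_read(this_cpu_ptr(&hits));
 */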
static inline void local_inc(local_t *l)
{
        asm volatile(_ASM_INC "%0"
                     : "+m" (l->a.counter));
}

static inline void local_dec(local_t *l)
{
        asm volatile(_ASM_DEC "%0"
                     : "+m" (l->a.counter));
}

static inline void local_add(long i, local_t *l)
{
        asm volatile(_ASM_ADD "%1,%0"
                     : "+m" (l->a.counter)
                     : "ir" (i));
}

static inline void local_sub(long i, local_t *l)
{
        asm volatile(_ASM_SUB "%1,%0"
                     : "+m" (l->a.counter)
                     : "ir" (i));
}
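/*
 * Example (a sketch, not part of this header): updating the
 * hypothetical per-CPU counter above from process context.
 * get_cpu_var()/put_cpu_var() disable and re-enable preemption
 * around the update so the task cannot migrate mid-operation.
 *
 *      local_inc(&get_cpu_var(hits));
 *      put_cpu_var(hits);
 */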
/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int local_sub_and_test(long i, local_t *l)
{
        unsigned char c;

        asm volatile(_ASM_SUB "%2,%0; sete %1"
                     : "+m" (l->a.counter), "=qm" (c)
                     : "ir" (i) : "memory");
        return c;
}
/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int local_dec_and_test(local_t *l)
{
        unsigned char c;

        asm volatile(_ASM_DEC "%0; sete %1"
                     : "+m" (l->a.counter), "=qm" (c)
                     : : "memory");
        return c != 0;
}
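/*
 * Usage sketch (hypothetical names): drop a reference held in a
 * local_t and free the object on the final put.  my_obj and
 * my_obj_free() are invented for illustration.
 *
 *      static void my_obj_put(struct my_obj *obj)
 *      {
 *              if (local_dec_and_test(&obj->refcnt))
 *                      my_obj_free(obj);
 *      }
 */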
/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int local_inc_and_test(local_t *l)
{
        unsigned char c;

        asm volatile(_ASM_INC "%0; sete %1"
                     : "+m" (l->a.counter), "=qm" (c)
                     : : "memory");
        return c != 0;
}
/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int local_add_negative(long i, local_t *l)
{
        unsigned char c;

        asm volatile(_ASM_ADD "%2,%0; sets %1"
                     : "+m" (l->a.counter), "=qm" (c)
                     : "ir" (i) : "memory");
        return c;
}
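/*
 * Example (a sketch): charge a cost against a budget counter and
 * react once it dips below zero.  "budget" and refill_budget() are
 * hypothetical.
 *
 *      if (local_add_negative(-cost, &budget))
 *              refill_budget(&budget);
 */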
/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
        long __i = i;

        /* XADD leaves the old value in %0 (i), so i + __i is the new value. */
        asm volatile(_ASM_XADD "%0, %1;"
                     : "+r" (i), "+m" (l->a.counter)
                     : : "memory");
        return i + __i;
}
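/*
 * Example (a sketch): because local_add_return() yields the value
 * after the addition, it can hand out per-CPU sequence numbers.
 * "seq" is a hypothetical per-CPU local_t; preemption is assumed to
 * be disabled already.
 *
 *      long id = local_add_return(1, this_cpu_ptr(&seq));
 */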
static inline long local_sub_return(long i, local_t *l)
{
        return local_add_return(-i, l);
}

#define local_inc_return(l)     (local_add_return(1, l))
#define local_dec_return(l)     (local_sub_return(1, l))

#define local_cmpxchg(l, o, n) \
        (cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
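/*
 * Example (a sketch): the classic compare-and-swap retry loop built
 * on local_cmpxchg(), here clamping a hypothetical counter "cnt" at
 * a hypothetical CEILING.
 *
 *      long old, new, c = local_read(&cnt);
 *      for (;;) {
 *              new = min(c + 1, CEILING);
 *              old = local_cmpxchg(&cnt, c, new);
 *              if (old == c)
 *                      break;
 *              c = old;
 *      }
 */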
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)                       \
({                                                      \
        long c, old;                                    \
        c = local_read((l));                            \
        for (;;) {                                      \
                if (unlikely(c == (u)))                 \
                        break;                          \
                old = local_cmpxchg((l), c, c + (a));   \
                if (likely(old == c))                   \
                        break;                          \
                c = old;                                \
        }                                               \
        c != (u);                                       \
})

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
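/*
 * Usage sketch (hypothetical names): local_inc_not_zero() is the
 * usual "take a reference only if the object is still live" idiom.
 *
 *      if (!local_inc_not_zero(&obj->refcnt))
 *              return NULL;    // object already being torn down
 */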
/*
 * On x86_32, these are no better than the atomic variants.
 * On x86_64, these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)          local_inc(l)
#define __local_dec(l)          local_dec(l)
#define __local_add(i, l)       local_add((i), (l))
#define __local_sub(i, l)       local_sub((i), (l))
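/*
 * Example (a sketch): in generic code the __local_* forms may be
 * non-atomic, so portable callers rule out reentrancy themselves; on
 * x86 they compile to the same single instructions as the variants
 * above.  "tx_bytes" is a hypothetical per-CPU local_t.
 *
 *      unsigned long flags;
 *
 *      local_irq_save(flags);
 *      __local_add(nbytes, this_cpu_ptr(&tx_bytes));
 *      local_irq_restore(flags);
 */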
#endif /* _ASM_X86_LOCAL_H */