/*
 * atomic64_32.h — 64-bit atomic operations (atomic64_t) for 32-bit x86.
 */
  1. #ifndef _ASM_X86_ATOMIC64_32_H
  2. #define _ASM_X86_ATOMIC64_32_H
  3. #include <linux/compiler.h>
  4. #include <linux/types.h>
  5. #include <asm/processor.h>
  6. //#include <asm/cmpxchg.h>
  7. /* An 64bit atomic type */
  8. typedef struct {
  9. u64 __aligned(8) counter;
  10. } atomic64_t;
  11. #define ATOMIC64_INIT(val) { (val) }
  12. #ifdef CONFIG_X86_CMPXCHG64
  13. #define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8"
  14. #else
  15. #define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8)
  16. #endif
  17. #define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f)
/**
 * atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
	/*
	 * NOTE(review): relies on cmpxchg64() from <asm/cmpxchg.h>, whose
	 * #include at the top of this file is commented out — verify.
	 */
	return cmpxchg64(&v->counter, o, n);
}
/**
 * atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
	long long o;
	unsigned high = (unsigned)(n >> 32);	/* high half of @n, passed in %ecx */
	unsigned low = (unsigned)n;		/* low half of @n, passed in %ebx */
	/*
	 * Out-of-line helper ABI: new value in %ebx:%ecx ("+b"/"+c"),
	 * atomic64_t pointer in %esi ("S"); old value comes back in
	 * %edx:%eax ("=A").  %ebx/%ecx are in/out because the helper
	 * may clobber them.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(xchg)
		     : "=A" (o), "+b" (low), "+c" (high)
		     : "S" (v)
		     : "memory"
		     );
	return o;
}
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	unsigned high = (unsigned)(i >> 32);	/* high half of @i, passed in %ecx */
	unsigned low = (unsigned)i;		/* low half of @i, passed in %ebx */
	/*
	 * Helper ABI: new value in %ebx:%ecx, pointer in %esi ("S").
	 * %eax/%edx are listed as clobbers since the helper may use them.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(set)
		     : "+b" (low), "+c" (high)
		     : "S" (v)
		     : "eax", "edx", "memory"
		     );
}
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline long long atomic64_read(atomic64_t *v)
{
	long long r;
	/*
	 * Helper ABI: pointer in %ecx ("+c", in/out because the helper may
	 * clobber it); the 64-bit value is returned in %edx:%eax ("=A").
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(read)
		     : "=A" (r), "+c" (v)
		     : : "memory"
		     );
	return r;
}
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	/*
	 * Helper ABI: addend in %edx:%eax ("+A"), pointer in %ecx ("+c").
	 * The helper leaves the resulting value in %edx:%eax, which we
	 * return through @i.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
/*
 * Other variants with different arithmetic operators:
 */

/* Atomically subtracts @i from @v and returns the new value (*@v - @i). */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	/* Same ABI as atomic64_add_return(): value in %edx:%eax, pointer in %ecx. */
	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
/* Atomically increments @v by 1 and returns the new value. */
static inline long long atomic64_inc_return(atomic64_t *v)
{
	long long a;
	/*
	 * Helper ABI: pointer in %esi ("S"); result returned in %edx:%eax
	 * ("=A").  %ecx may be clobbered by the helper.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}
/* Atomically decrements @v by 1 and returns the new value. */
static inline long long atomic64_dec_return(atomic64_t *v)
{
	long long a;
	/* Same ABI as atomic64_inc_return(): pointer in %esi, result in %edx:%eax. */
	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	/*
	 * Uses the plain "add" 386 fallback but the add_return cx8 helper;
	 * value in %edx:%eax ("+A"), pointer in %ecx ("+c").  The returned
	 * value is whatever the helper leaves in %edx:%eax — callers should
	 * not rely on it being the sum on the 386 path (NOTE(review):
	 * confirm against the out-of-line implementations).
	 */
	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	/* Same scheme as atomic64_add(): "sub" 386 fallback, sub_return cx8 helper. */
	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
  159. /**
  160. * atomic64_sub_and_test - subtract value from variable and test result
  161. * @i: integer value to subtract
  162. * @v: pointer to type atomic64_t
  163. *
  164. * Atomically subtracts @i from @v and returns
  165. * true if the result is zero, or false for all
  166. * other cases.
  167. */
  168. static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
  169. {
  170. return atomic64_sub_return(i, v) == 0;
  171. }
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	/*
	 * Helper ABI: pointer in %esi ("S"); result is discarded, so
	 * %eax/%ecx/%edx are declared as clobbers.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	/* Same ABI as atomic64_inc(): pointer in %esi, result discarded. */
	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}
  198. /**
  199. * atomic64_dec_and_test - decrement and test
  200. * @v: pointer to type atomic64_t
  201. *
  202. * Atomically decrements @v by 1 and
  203. * returns true if the result is 0, or false for all other
  204. * cases.
  205. */
  206. static inline int atomic64_dec_and_test(atomic64_t *v)
  207. {
  208. return atomic64_dec_return(v) == 0;
  209. }
  210. /**
  211. * atomic64_inc_and_test - increment and test
  212. * @v: pointer to type atomic64_t
  213. *
  214. * Atomically increments @v by 1
  215. * and returns true if the result is zero, or false for all
  216. * other cases.
  217. */
  218. static inline int atomic64_inc_and_test(atomic64_t *v)
  219. {
  220. return atomic64_inc_return(v) == 0;
  221. }
  222. /**
  223. * atomic64_add_negative - add and test if negative
  224. * @i: integer value to add
  225. * @v: pointer to type atomic64_t
  226. *
  227. * Atomically adds @i to @v and returns true
  228. * if the result is negative, or false when
  229. * result is greater than or equal to zero.
  230. */
  231. static inline int atomic64_add_negative(long long i, atomic64_t *v)
  232. {
  233. return atomic64_add_return(i, v) < 0;
  234. }
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	unsigned low = (unsigned)u;		/* low half of @u, passed in %esi */
	unsigned high = (unsigned)(u >> 32);	/* high half of @u, passed in %edi */
	/*
	 * Helper ABI: addend in %edx:%eax ("+A"), pointer in %ecx ("+c"),
	 * the "unless" comparison value in %esi:%edi ("+S"/"+D").  The
	 * helper's boolean result comes back in the low half of @a (%eax).
	 * The trailing "\n\t" only separates the call from any following
	 * assembly and has no functional effect.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
		     : : "memory");
	return (int)a;
}
/*
 * Atomically increments @v by 1, unless it is zero.
 * Returns non-zero if the increment was performed, zero otherwise.
 */
static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	/*
	 * Helper ABI: pointer in %esi ("S"); boolean result returned in
	 * %eax ("=a").  %ecx/%edx may be clobbered by the helper.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
		     : "=a" (r)
		     : "S" (v)
		     : "ecx", "edx", "memory"
		     );
	return r;
}
/*
 * Atomically decrements @v by 1, unless the result would be negative,
 * and returns the value the helper leaves in %edx:%eax.
 * NOTE(review): the exact old-vs-new return semantics live in the
 * out-of-line atomic64_dec_if_positive implementation — confirm there.
 */
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;
	/* Helper ABI: pointer in %esi ("S"); 64-bit result in %edx:%eax ("=A"). */
	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
		     : "=A" (r)
		     : "S" (v)
		     : "ecx", "memory"
		     );
	return r;
}
  273. #undef ATOMIC64_ALTERNATIVE
  274. #undef ATOMIC64_ALTERNATIVE_
  275. #endif /* _ASM_X86_ATOMIC64_32_H */