atomic.h

#ifndef _ASM_M32R_ATOMIC_H
#define _ASM_M32R_ATOMIC_H

/*
 * linux/include/asm-m32r/atomic.h
 *
 * M32R version:
 *   Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *   Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/types.h>
#include <asm/assembler.h>
#include <asm/cmpxchg.h>
#include <asm/dcache_clear.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	(*(volatile int *)&(v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))
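/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * a counter is typically declared as "static atomic_t cnt = ATOMIC_INIT(0);"
 * and then accessed through atomic_read()/atomic_set().  The helper name
 * example_reset_counter() is hypothetical.
 */
static __inline__ int example_reset_counter(atomic_t *v)
{
	int old = atomic_read(v);	/* plain atomic load of v->counter */

	atomic_set(v, 0);		/* plain atomic store, no ordering implied;
					 * note the read/set pair is not atomic
					 * as a whole */
	return old;
}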
/**
 * atomic_add_return - add integer to atomic variable and return it
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns (@i + @v).
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_add_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"add	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
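/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * atomic_add_return() suits callers that need the post-add value, e.g.
 * handing out sequence numbers.  example_next_seq() is a hypothetical name.
 */
static __inline__ int example_next_seq(atomic_t *seq)
{
	/* Returns the value of *seq after the increment. */
	return atomic_add_return(1, seq);
}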
/**
 * atomic_sub_return - subtract integer from atomic variable and return it
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns (@v - @i).
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_sub_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"sub	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
#define atomic_add(i,v) ((void) atomic_add_return((i), (v)))

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
#define atomic_sub(i,v) ((void) atomic_sub_return((i), (v)))

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
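/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * the usual reference-count release pattern built on atomic_sub_and_test().
 * example_put() and the release callback are hypothetical names.
 */
static __inline__ void example_put(atomic_t *refcount, void (*release)(void))
{
	/* Drop one reference; only the last holder runs the destructor. */
	if (atomic_sub_and_test(1, refcount))
		release();
}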
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #1;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #-1;		\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
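/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * atomic_xchg() stores a new value and returns the old one in a single
 * atomic step, which makes a one-shot "claim" flag trivial.  The helper
 * name example_claim_once() is hypothetical.
 */
static __inline__ int example_claim_once(atomic_t *flag)
{
	/* Exactly one caller observes the old value 0 and "wins". */
	return atomic_xchg(flag, 1) == 0;
}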
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}

	return c;
}
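/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * __atomic_add_unless() is the building block for "take a reference only
 * if the object is still live", i.e. skip the increment once the count has
 * already dropped to zero.  example_get_unless_zero() is a hypothetical name.
 */
static __inline__ int example_get_unless_zero(atomic_t *refcount)
{
	/* Returns non-zero if a reference was taken, 0 if the count was 0. */
	return __atomic_add_unless(refcount, 1, 0) != 0;
}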
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_clear_mask		\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"and	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_set_mask		\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"or	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
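/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * atomic_set_mask()/atomic_clear_mask() toggle bits in an atomic_t used as
 * a flag word.  EXAMPLE_FLAG_BUSY and example_set_busy() are hypothetical.
 */
#define EXAMPLE_FLAG_BUSY	0x1UL
static __inline__ void example_set_busy(atomic_t *flags, int busy)
{
	if (busy)
		atomic_set_mask(EXAMPLE_FLAG_BUSY, flags);
	else
		atomic_clear_mask(EXAMPLE_FLAG_BUSY, flags);
}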
/* Atomic operations are already serializing on m32r */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
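/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * generic code pairs these hooks with atomic_dec()/atomic_inc() to order
 * surrounding accesses; on m32r they reduce to compiler barriers (barrier()
 * is assumed to be visible via the usual compiler.h include chain).  The
 * helper name example_publish_and_dec() is hypothetical.
 */
static __inline__ void example_publish_and_dec(atomic_t *pending)
{
	smp_mb__before_atomic_dec();	/* keep earlier stores before the dec */
	atomic_dec(pending);
}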
#endif	/* _ASM_M32R_ATOMIC_H */