  1. #ifndef _ASM_M32R_ATOMIC_H
  2. #define _ASM_M32R_ATOMIC_H
  3. /*
  4. * linux/include/asm-m32r/atomic.h
  5. *
  6. * M32R version:
  7. * Copyright (C) 2001, 2002 Hitoshi Yamamoto
  8. * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
  9. */
  10. #include <linux/config.h>
  11. #include <asm/assembler.h>
  12. #include <asm/system.h>
  13. /*
  14. * Atomic operations that C can't guarantee us. Useful for
  15. * resource counting etc..
  16. */
  17. /*
  18. * Make sure gcc doesn't try to be clever and move things around
  19. * on us. We need to use _exactly_ the address the user gave us,
  20. * not some alias that contains the same information.
  21. */
  22. typedef struct { volatile int counter; } atomic_t;
  23. #define ATOMIC_INIT(i) { (i) }
  24. /**
  25. * atomic_read - read atomic variable
  26. * @v: pointer of type atomic_t
  27. *
  28. * Atomically reads the value of @v.
  29. */
  30. #define atomic_read(v) ((v)->counter)
  31. /**
  32. * atomic_set - set atomic variable
  33. * @v: pointer of type atomic_t
  34. * @i: required value
  35. *
  36. * Atomically sets the value of @v to @i.
  37. */
  38. #define atomic_set(v,i) (((v)->counter) = (i))
  39. /**
  40. * atomic_add_return - add integer to atomic variable and return it
  41. * @i: integer value to add
  42. * @v: pointer of type atomic_t
  43. *
  44. * Atomically adds @i to @v and return (@i + @v).
  45. */
  46. static __inline__ int atomic_add_return(int i, atomic_t *v)
  47. {
  48. unsigned long flags;
  49. int result;
  50. local_irq_save(flags);
  51. __asm__ __volatile__ (
  52. "# atomic_add_return \n\t"
  53. DCACHE_CLEAR("%0", "r4", "%1")
  54. M32R_LOCK" %0, @%1; \n\t"
  55. "add %0, %2; \n\t"
  56. M32R_UNLOCK" %0, @%1; \n\t"
  57. : "=&r" (result)
  58. : "r" (&v->counter), "r" (i)
  59. : "memory"
  60. #ifdef CONFIG_CHIP_M32700_TS1
  61. , "r4"
  62. #endif /* CONFIG_CHIP_M32700_TS1 */
  63. );
  64. local_irq_restore(flags);
  65. return result;
  66. }
  67. /**
  68. * atomic_sub_return - subtract integer from atomic variable and return it
  69. * @i: integer value to subtract
  70. * @v: pointer of type atomic_t
  71. *
  72. * Atomically subtracts @i from @v and return (@v - @i).
  73. */
  74. static __inline__ int atomic_sub_return(int i, atomic_t *v)
  75. {
  76. unsigned long flags;
  77. int result;
  78. local_irq_save(flags);
  79. __asm__ __volatile__ (
  80. "# atomic_sub_return \n\t"
  81. DCACHE_CLEAR("%0", "r4", "%1")
  82. M32R_LOCK" %0, @%1; \n\t"
  83. "sub %0, %2; \n\t"
  84. M32R_UNLOCK" %0, @%1; \n\t"
  85. : "=&r" (result)
  86. : "r" (&v->counter), "r" (i)
  87. : "memory"
  88. #ifdef CONFIG_CHIP_M32700_TS1
  89. , "r4"
  90. #endif /* CONFIG_CHIP_M32700_TS1 */
  91. );
  92. local_irq_restore(flags);
  93. return result;
  94. }
  95. /**
  96. * atomic_add - add integer to atomic variable
  97. * @i: integer value to add
  98. * @v: pointer of type atomic_t
  99. *
  100. * Atomically adds @i to @v.
  101. */
  102. #define atomic_add(i,v) ((void) atomic_add_return((i), (v)))
  103. /**
  104. * atomic_sub - subtract the atomic variable
  105. * @i: integer value to subtract
  106. * @v: pointer of type atomic_t
  107. *
  108. * Atomically subtracts @i from @v.
  109. */
  110. #define atomic_sub(i,v) ((void) atomic_sub_return((i), (v)))
  111. /**
  112. * atomic_sub_and_test - subtract value from variable and test result
  113. * @i: integer value to subtract
  114. * @v: pointer of type atomic_t
  115. *
  116. * Atomically subtracts @i from @v and returns
  117. * true if the result is zero, or false for all
  118. * other cases.
  119. */
  120. #define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
  121. /**
  122. * atomic_inc_return - increment atomic variable and return it
  123. * @v: pointer of type atomic_t
  124. *
  125. * Atomically increments @v by 1 and returns the result.
  126. */
  127. static __inline__ int atomic_inc_return(atomic_t *v)
  128. {
  129. unsigned long flags;
  130. int result;
  131. local_irq_save(flags);
  132. __asm__ __volatile__ (
  133. "# atomic_inc_return \n\t"
  134. DCACHE_CLEAR("%0", "r4", "%1")
  135. M32R_LOCK" %0, @%1; \n\t"
  136. "addi %0, #1; \n\t"
  137. M32R_UNLOCK" %0, @%1; \n\t"
  138. : "=&r" (result)
  139. : "r" (&v->counter)
  140. : "memory"
  141. #ifdef CONFIG_CHIP_M32700_TS1
  142. , "r4"
  143. #endif /* CONFIG_CHIP_M32700_TS1 */
  144. );
  145. local_irq_restore(flags);
  146. return result;
  147. }
  148. /**
  149. * atomic_dec_return - decrement atomic variable and return it
  150. * @v: pointer of type atomic_t
  151. *
  152. * Atomically decrements @v by 1 and returns the result.
  153. */
  154. static __inline__ int atomic_dec_return(atomic_t *v)
  155. {
  156. unsigned long flags;
  157. int result;
  158. local_irq_save(flags);
  159. __asm__ __volatile__ (
  160. "# atomic_dec_return \n\t"
  161. DCACHE_CLEAR("%0", "r4", "%1")
  162. M32R_LOCK" %0, @%1; \n\t"
  163. "addi %0, #-1; \n\t"
  164. M32R_UNLOCK" %0, @%1; \n\t"
  165. : "=&r" (result)
  166. : "r" (&v->counter)
  167. : "memory"
  168. #ifdef CONFIG_CHIP_M32700_TS1
  169. , "r4"
  170. #endif /* CONFIG_CHIP_M32700_TS1 */
  171. );
  172. local_irq_restore(flags);
  173. return result;
  174. }
  175. /**
  176. * atomic_inc - increment atomic variable
  177. * @v: pointer of type atomic_t
  178. *
  179. * Atomically increments @v by 1.
  180. */
  181. #define atomic_inc(v) ((void)atomic_inc_return(v))
  182. /**
  183. * atomic_dec - decrement atomic variable
  184. * @v: pointer of type atomic_t
  185. *
  186. * Atomically decrements @v by 1.
  187. */
  188. #define atomic_dec(v) ((void)atomic_dec_return(v))
  189. /**
  190. * atomic_inc_and_test - increment and test
  191. * @v: pointer of type atomic_t
  192. *
  193. * Atomically increments @v by 1
  194. * and returns true if the result is zero, or false for all
  195. * other cases.
  196. */
  197. #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
  198. /**
  199. * atomic_dec_and_test - decrement and test
  200. * @v: pointer of type atomic_t
  201. *
  202. * Atomically decrements @v by 1 and
  203. * returns true if the result is 0, or false for all
  204. * other cases.
  205. */
  206. #define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
  207. /**
  208. * atomic_add_negative - add and test if negative
  209. * @v: pointer of type atomic_t
  210. * @i: integer value to add
  211. *
  212. * Atomically adds @i to @v and returns true
  213. * if the result is negative, or false when
  214. * result is greater than or equal to zero.
  215. */
  216. #define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
  217. #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
  218. #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  219. /**
  220. * atomic_add_unless - add unless the number is a given value
  221. * @v: pointer of type atomic_t
  222. * @a: the amount to add to v...
  223. * @u: ...unless v is equal to u.
  224. *
  225. * Atomically adds @a to @v, so long as it was not @u.
  226. * Returns non-zero if @v was not @u, and zero otherwise.
  227. */
  228. #define atomic_add_unless(v, a, u) \
  229. ({ \
  230. int c, old; \
  231. c = atomic_read(v); \
  232. while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
  233. c = old; \
  234. c != (u); \
  235. })
  236. #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
  237. static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
  238. {
  239. unsigned long flags;
  240. unsigned long tmp;
  241. local_irq_save(flags);
  242. __asm__ __volatile__ (
  243. "# atomic_clear_mask \n\t"
  244. DCACHE_CLEAR("%0", "r5", "%1")
  245. M32R_LOCK" %0, @%1; \n\t"
  246. "and %0, %2; \n\t"
  247. M32R_UNLOCK" %0, @%1; \n\t"
  248. : "=&r" (tmp)
  249. : "r" (addr), "r" (~mask)
  250. : "memory"
  251. #ifdef CONFIG_CHIP_M32700_TS1
  252. , "r5"
  253. #endif /* CONFIG_CHIP_M32700_TS1 */
  254. );
  255. local_irq_restore(flags);
  256. }
  257. static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
  258. {
  259. unsigned long flags;
  260. unsigned long tmp;
  261. local_irq_save(flags);
  262. __asm__ __volatile__ (
  263. "# atomic_set_mask \n\t"
  264. DCACHE_CLEAR("%0", "r5", "%1")
  265. M32R_LOCK" %0, @%1; \n\t"
  266. "or %0, %2; \n\t"
  267. M32R_UNLOCK" %0, @%1; \n\t"
  268. : "=&r" (tmp)
  269. : "r" (addr), "r" (mask)
  270. : "memory"
  271. #ifdef CONFIG_CHIP_M32700_TS1
  272. , "r5"
  273. #endif /* CONFIG_CHIP_M32700_TS1 */
  274. );
  275. local_irq_restore(flags);
  276. }
  277. /* Atomic operations are already serializing on m32r */
  278. #define smp_mb__before_atomic_dec() barrier()
  279. #define smp_mb__after_atomic_dec() barrier()
  280. #define smp_mb__before_atomic_inc() barrier()
  281. #define smp_mb__after_atomic_inc() barrier()
  282. #include <asm-generic/atomic.h>
  283. #endif /* _ASM_M32R_ATOMIC_H */