/* NOTE(review): removed web-viewer extraction artifacts (file-size banner
 * and a fused run of line numbers) that were not part of the header. */
  1. /* $Id: atomic.h,v 1.3 2001/07/25 16:15:19 bjornw Exp $ */
  2. #ifndef __ASM_CRIS_ATOMIC__
  3. #define __ASM_CRIS_ATOMIC__
  4. #include <asm/system.h>
  5. #include <asm/arch/atomic.h>
  6. /*
  7. * Atomic operations that C can't guarantee us. Useful for
  8. * resource counting etc..
  9. */
/* The atomic counter type: a plain int, marked volatile so every access
 * goes to memory rather than a cached register value. */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)  { (i) }

/* Plain read/write of the counter; a single aligned int access needs no
 * extra locking on this architecture. */
#define atomic_read(v)  ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))
  14. /* These should be written in asm but we do it in C for now. */
  15. static inline void atomic_add(int i, volatile atomic_t *v)
  16. {
  17. unsigned long flags;
  18. cris_atomic_save(v, flags);
  19. v->counter += i;
  20. cris_atomic_restore(v, flags);
  21. }
  22. static inline void atomic_sub(int i, volatile atomic_t *v)
  23. {
  24. unsigned long flags;
  25. cris_atomic_save(v, flags);
  26. v->counter -= i;
  27. cris_atomic_restore(v, flags);
  28. }
  29. static inline int atomic_add_return(int i, volatile atomic_t *v)
  30. {
  31. unsigned long flags;
  32. int retval;
  33. cris_atomic_save(v, flags);
  34. retval = (v->counter += i);
  35. cris_atomic_restore(v, flags);
  36. return retval;
  37. }
  38. #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
  39. static inline int atomic_sub_return(int i, volatile atomic_t *v)
  40. {
  41. unsigned long flags;
  42. int retval;
  43. cris_atomic_save(v, flags);
  44. retval = (v->counter -= i);
  45. cris_atomic_restore(v, flags);
  46. return retval;
  47. }
  48. static inline int atomic_sub_and_test(int i, volatile atomic_t *v)
  49. {
  50. int retval;
  51. unsigned long flags;
  52. cris_atomic_save(v, flags);
  53. retval = (v->counter -= i) == 0;
  54. cris_atomic_restore(v, flags);
  55. return retval;
  56. }
  57. static inline void atomic_inc(volatile atomic_t *v)
  58. {
  59. unsigned long flags;
  60. cris_atomic_save(v, flags);
  61. (v->counter)++;
  62. cris_atomic_restore(v, flags);
  63. }
  64. static inline void atomic_dec(volatile atomic_t *v)
  65. {
  66. unsigned long flags;
  67. cris_atomic_save(v, flags);
  68. (v->counter)--;
  69. cris_atomic_restore(v, flags);
  70. }
  71. static inline int atomic_inc_return(volatile atomic_t *v)
  72. {
  73. unsigned long flags;
  74. int retval;
  75. cris_atomic_save(v, flags);
  76. retval = (v->counter)++;
  77. cris_atomic_restore(v, flags);
  78. return retval;
  79. }
  80. static inline int atomic_dec_return(volatile atomic_t *v)
  81. {
  82. unsigned long flags;
  83. int retval;
  84. cris_atomic_save(v, flags);
  85. retval = (v->counter)--;
  86. cris_atomic_restore(v, flags);
  87. return retval;
  88. }
  89. static inline int atomic_dec_and_test(volatile atomic_t *v)
  90. {
  91. int retval;
  92. unsigned long flags;
  93. cris_atomic_save(v, flags);
  94. retval = --(v->counter) == 0;
  95. cris_atomic_restore(v, flags);
  96. return retval;
  97. }
  98. static inline int atomic_inc_and_test(volatile atomic_t *v)
  99. {
  100. int retval;
  101. unsigned long flags;
  102. cris_atomic_save(v, flags);
  103. retval = ++(v->counter) == 0;
  104. cris_atomic_restore(v, flags);
  105. return retval;
  106. }
  107. static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
  108. {
  109. int ret;
  110. unsigned long flags;
  111. cris_atomic_save(v, flags);
  112. ret = v->counter;
  113. if (likely(ret == old))
  114. v->counter = new;
  115. cris_atomic_restore(v, flags);
  116. return ret;
  117. }
  118. #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  119. static inline int atomic_add_unless(atomic_t *v, int a, int u)
  120. {
  121. int ret;
  122. unsigned long flags;
  123. cris_atomic_save(v, flags);
  124. ret = v->counter;
  125. if (ret != u)
  126. v->counter += a;
  127. cris_atomic_restore(v, flags);
  128. return ret != u;
  129. }
/* Increment @v only if it is not zero; nonzero return means it was
 * incremented (common refcount "get if still live" pattern). */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* The atomic operations above are already serializing on this arch,
 * so these hooks only need a compiler barrier, not a full memory
 * barrier. */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
  136. #include <asm-generic/atomic.h>
  137. #endif