atomic.h

/* $Id: atomic.h,v 1.3 2001/07/25 16:15:19 bjornw Exp $ */

#ifndef __ASM_CRIS_ATOMIC__
#define __ASM_CRIS_ATOMIC__

#include <asm/system.h>
#include <asm/arch/atomic.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)  { (i) }

#define atomic_read(v)  ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))

/* These should be written in asm but we do it in C for now. */

extern __inline__ void atomic_add(int i, volatile atomic_t *v)
{
        unsigned long flags;
        cris_atomic_save(v, flags);
        v->counter += i;
        cris_atomic_restore(v, flags);
}

extern __inline__ void atomic_sub(int i, volatile atomic_t *v)
{
        unsigned long flags;
        cris_atomic_save(v, flags);
        v->counter -= i;
        cris_atomic_restore(v, flags);
}

extern __inline__ int atomic_add_return(int i, volatile atomic_t *v)
{
        unsigned long flags;
        int retval;
        cris_atomic_save(v, flags);
        retval = (v->counter += i);
        cris_atomic_restore(v, flags);
        return retval;
}

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)

extern __inline__ int atomic_sub_return(int i, volatile atomic_t *v)
{
        unsigned long flags;
        int retval;
        cris_atomic_save(v, flags);
        retval = (v->counter -= i);
        cris_atomic_restore(v, flags);
        return retval;
}

extern __inline__ int atomic_sub_and_test(int i, volatile atomic_t *v)
{
        int retval;
        unsigned long flags;
        cris_atomic_save(v, flags);
        retval = (v->counter -= i) == 0;
        cris_atomic_restore(v, flags);
        return retval;
}

extern __inline__ void atomic_inc(volatile atomic_t *v)
{
        unsigned long flags;
        cris_atomic_save(v, flags);
        (v->counter)++;
        cris_atomic_restore(v, flags);
}

extern __inline__ void atomic_dec(volatile atomic_t *v)
{
        unsigned long flags;
        cris_atomic_save(v, flags);
        (v->counter)--;
        cris_atomic_restore(v, flags);
}
extern __inline__ int atomic_inc_return(volatile atomic_t *v)
{
        unsigned long flags;
        int retval;
        cris_atomic_save(v, flags);
        /* Return the new value, matching the other atomic_*_return()
           helpers above; the original post-increment returned the old
           value instead. */
        retval = ++(v->counter);
        cris_atomic_restore(v, flags);
        return retval;
}

extern __inline__ int atomic_dec_return(volatile atomic_t *v)
{
        unsigned long flags;
        int retval;
        cris_atomic_save(v, flags);
        /* Likewise, return the new (decremented) value. */
        retval = --(v->counter);
        cris_atomic_restore(v, flags);
        return retval;
}
extern __inline__ int atomic_dec_and_test(volatile atomic_t *v)
{
        int retval;
        unsigned long flags;
        cris_atomic_save(v, flags);
        retval = --(v->counter) == 0;
        cris_atomic_restore(v, flags);
        return retval;
}

extern __inline__ int atomic_inc_and_test(volatile atomic_t *v)
{
        int retval;
        unsigned long flags;
        cris_atomic_save(v, flags);
        retval = ++(v->counter) == 0;
        cris_atomic_restore(v, flags);
        return retval;
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

#endif /* __ASM_CRIS_ATOMIC__ */
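
As a usage illustration only, not part of the header: a minimal sketch of the "resource counting" use mentioned in the header comment. The names demo_users, demo_get and demo_put are hypothetical; only the atomic_* calls come from the API defined above.

/* Hypothetical example: tracking users of a shared resource. */
static atomic_t demo_users = ATOMIC_INIT(0);

static void demo_get(void)
{
        atomic_inc(&demo_users);                /* one more user */
}

static void demo_put(void)
{
        if (atomic_dec_and_test(&demo_users)) {
                /* counter hit zero: the last user is gone, so the
                   resource could be released here */
        }
}

Because atomic_dec_and_test() performs the decrement and the zero test inside one cris_atomic_save/restore region, two tasks calling demo_put() concurrently cannot both observe zero.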