#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))
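
/*
 * Example (illustrative only, not part of this header): declaring,
 * initializing, and inspecting an atomic counter.  The identifier
 * nr_users is hypothetical.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 10);
 *	if (atomic_read(&nr_users) > 0)
 *		printk("users active\n");
 */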

#include <linux/compiler.h>
#include <asm/system.h>

/*
 * These operations are not implemented with lockless retry loops;
 * instead, atomicity is guaranteed by disabling local interrupts
 * around each read-modify-write sequence.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
        unsigned long flags;

        local_irq_save(flags);
        *(long *)v += i;
        local_irq_restore(flags);
}

static __inline__ void atomic_sub(int i, atomic_t *v)
{
        unsigned long flags;

        local_irq_save(flags);
        *(long *)v -= i;
        local_irq_restore(flags);
}

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
        unsigned long temp, flags;

        local_irq_save(flags);
        temp = *(long *)v;
        temp += i;
        *(long *)v = temp;
        local_irq_restore(flags);

        return temp;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
        unsigned long temp, flags;

        local_irq_save(flags);
        temp = *(long *)v;
        temp -= i;
        *(long *)v = temp;
        local_irq_restore(flags);

        return temp;
}

#define atomic_dec_return(v)	atomic_sub_return(1, (v))
#define atomic_inc_return(v)	atomic_add_return(1, (v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v)	atomic_add(1, (v))
#define atomic_dec(v)	atomic_sub(1, (v))
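
/*
 * Example (illustrative only, not part of this header): the classic
 * reference-counting pattern these helpers exist for.  free_object()
 * is a hypothetical destructor.
 *
 *	static atomic_t refcount = ATOMIC_INIT(1);
 *
 *	void get_object(void)
 *	{
 *		atomic_inc(&refcount);
 *	}
 *
 *	void put_object(void)
 *	{
 *		if (atomic_dec_and_test(&refcount))
 *			free_object();
 *	}
 */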

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);
        ret = v->counter;
        if (likely(ret == old))
                v->counter = new;
        local_irq_restore(flags);

        return ret;
}
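
/*
 * Example (illustrative only, not part of this header): a typical
 * compare-and-swap retry loop built on atomic_cmpxchg(); this sketch
 * is a hypothetical saturating increment that never passes @limit.
 * atomic_cmpxchg() returns the value it observed, so the swap
 * succeeded exactly when that value equals the expected one.
 *
 *	int atomic_inc_below(atomic_t *v, int limit)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old >= limit)
 *				return 0;
 *			new = old + 1;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return 1;
 *	}
 */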

#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);
        ret = v->counter;
        if (ret != u)
                v->counter += a;
        local_irq_restore(flags);

        return ret != u;
}

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
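
/*
 * Example (illustrative only, not part of this header):
 * atomic_inc_not_zero() covers lookups that can race with the last
 * reference being dropped; a new reference is taken only if the count
 * has not already reached zero.  find_object() and the obj/refcount
 * names are hypothetical.
 *
 *	obj = find_object(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcount))
 *		obj = NULL;
 */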

static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
        unsigned long flags;

        local_irq_save(flags);
        *(long *)v &= ~mask;
        local_irq_restore(flags);
}

static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
{
        unsigned long flags;

        local_irq_save(flags);
        *(long *)v |= mask;
        local_irq_restore(flags);
}
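
/*
 * Example (illustrative only, not part of this header): using the mask
 * helpers to flip flag bits atomically.  FLAG_BUSY and do_busy_work()
 * are hypothetical.
 *
 *	#define FLAG_BUSY	0x01
 *	static atomic_t flags = ATOMIC_INIT(0);
 *
 *	atomic_set_mask(FLAG_BUSY, &flags);
 *	if (atomic_read(&flags) & FLAG_BUSY)
 *		do_busy_work();
 *	atomic_clear_mask(FLAG_BUSY, &flags);
 */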

/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>

#endif /* __ASM_SH_ATOMIC_H */