atomic.h

#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		( (atomic_t) { (i) } )

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	((v)->counter = (i))
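/*
 * Note: atomic_read() and atomic_set() are plain volatile accesses.  A
 * naturally aligned 32-bit load or store cannot be torn, but these
 * helpers imply no memory barriers and no read-modify-write atomicity.
 */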
#include <linux/compiler.h>
#include <asm/system.h>

#if defined(CONFIG_GUSA_RB)
#include <asm/atomic-grb.h>
#elif defined(CONFIG_CPU_SH4A)
#include <asm/atomic-llsc.h>
#else
#include <asm/atomic-irq.h>
#endif
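/*
 * The header selected above supplies the basic arithmetic primitives
 * (atomic_add(), atomic_sub(), atomic_add_return(), atomic_sub_return()):
 * atomic-grb.h uses gUSA register-bank sequences, atomic-llsc.h uses the
 * SH-4A movli.l/movco.l (LL/SC) instructions, and atomic-irq.h falls back
 * to disabling interrupts around a plain read-modify-write.
 */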
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))
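/*
 * Illustrative use of atomic_dec_and_test() (example only; the object
 * and its refcount field are made up, not part of this header): the
 * classic reference-count release pattern, where the thread that drops
 * the count to zero frees the object.
 *
 *	if (atomic_dec_and_test(&obj->refcount))
 *		kfree(obj);
 */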
#ifndef CONFIG_GUSA_RB
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}
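/*
 * Typical caller pattern for atomic_cmpxchg() (illustrative sketch only;
 * LIMIT is a made-up bound): re-read the counter and retry until the
 * compare-and-swap succeeds, e.g. a bounded increment.
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(v);
 *		if (old == LIMIT)
 *			break;
 *		new = old + 1;
 *	} while (atomic_cmpxchg(v, old, new) != old);
 */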
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}
#endif

#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
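/*
 * atomic_inc_not_zero() builds on atomic_add_unless(): it takes a
 * reference only if the count has not already dropped to zero.
 * Illustrative use (obj and its refcount field are hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;	// object is already being torn down
 */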
/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>

#endif /* __ASM_SH_ATOMIC_H */