/*
 * include/asm-v850/atomic.h -- Atomic operations
 *
 *  Copyright (C) 2001,02  NEC Corporation
 *  Copyright (C) 2001,02  Miles Bader <miles@gnu.org>
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file COPYING in the main directory of this
 * archive for more details.
 *
 * Written by Miles Bader <miles@gnu.org>
 */
#ifndef __V850_ATOMIC_H__
#define __V850_ATOMIC_H__

#include <asm/system.h>

#ifdef CONFIG_SMP
#error SMP not supported
#endif
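
/*
 * This port is uniprocessor-only (see the CONFIG_SMP check above), so
 * the operations below can be made atomic simply by disabling
 * interrupts around each read-modify-write sequence.
 */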
typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }
#ifdef __KERNEL__

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
static inline int atomic_add_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter + i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}
static inline int atomic_sub_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter - i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}
static inline void atomic_clear_mask (unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save (flags);
	*addr &= ~mask;
	local_irq_restore (flags);
}

#endif /* __KERNEL__ */
#define atomic_add(i, v)	atomic_add_return ((i), (v))
#define atomic_sub(i, v)	atomic_sub_return ((i), (v))

#define atomic_dec_return(v)	atomic_sub_return (1, (v))
#define atomic_inc_return(v)	atomic_add_return (1, (v))
#define atomic_inc(v)		atomic_inc_return (v)
#define atomic_dec(v)		atomic_dec_return (v)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
#define atomic_sub_and_test(i,v)	(atomic_sub_return ((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return (1, (v)) == 0)
#define atomic_add_negative(i,v)	(atomic_add_return ((i), (v)) < 0)
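
/*
 * Illustration (not part of this header): the *_and_test macros test
 * the result of the operation, not the old value.  E.g. given
 *
 *	atomic_t a = ATOMIC_INIT(-1);
 *
 * atomic_inc_and_test (&a) increments the counter to 0 and therefore
 * returns true.
 */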
/*
 * Returns the value read from @v; the exchange succeeded iff the
 * returned value equals @old.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}
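
/*
 * Usage sketch (illustrative, not part of this header): the classic
 * cmpxchg retry loop, here doubling a hypothetical counter `v':
 *
 *	int old, new;
 *	do {
 *		old = atomic_read (v);
 *		new = old * 2;
 *	} while (atomic_cmpxchg (v, old, new) != old);
 */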
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
/*
 * Adds @a to @v unless @v was equal to @u; returns nonzero iff the
 * add was actually performed.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
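
/*
 * Usage sketch (illustrative, not part of this header): a lookup path
 * typically uses atomic_inc_not_zero() to take a reference only if the
 * hypothetical object `obj' is not already being torn down:
 *
 *	if (!atomic_inc_not_zero (&obj->refcnt))
 *		return NULL;	(refcount was already 0; object is dying)
 */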
/* Atomic operations are already serializing on v850, since they run
   with interrupts disabled on a uniprocessor.  */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
#include <asm-generic/atomic.h>

#endif /* __V850_ATOMIC_H__ */
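
Below is a minimal usage sketch, not part of the header itself: a hypothetical reference-counted object built on these primitives. The names `widget`, `widget_destroy`, and the helper functions are invented for illustration, and the code assumes a kernel build where this header is reachable as <asm/atomic.h>.

#include <asm/atomic.h>

/* A hypothetical reference-counted object. */
struct widget {
	atomic_t refcnt;
	/* ... payload ... */
};

static void widget_destroy (struct widget *w)
{
	/* Hypothetical teardown; release resources here. */
}

static void widget_init (struct widget *w)
{
	atomic_set (&w->refcnt, 1);	/* one reference for the creator */
}

static void widget_get (struct widget *w)
{
	atomic_inc (&w->refcnt);
}

static void widget_put (struct widget *w)
{
	/* atomic_dec_and_test() is true only when the count reaches
	   zero, i.e. for the final reference. */
	if (atomic_dec_and_test (&w->refcnt))
		widget_destroy (w);
}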