/*
 * include/asm-v850/atomic.h -- Atomic operations
 *
 *  Copyright (C) 2001,02  NEC Corporation
 *  Copyright (C) 2001,02  Miles Bader <miles@gnu.org>
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file COPYING in the main directory of this
 * archive for more details.
 *
 * Written by Miles Bader <miles@gnu.org>
 */

#ifndef __V850_ATOMIC_H__
#define __V850_ATOMIC_H__

#include <linux/config.h>
#include <asm/system.h>

#ifdef CONFIG_SMP
#error SMP not supported
#endif
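
/*
 * This port supports only uniprocessor (UP) configurations, so
 * atomicity need only hold against interrupts on the local CPU:
 * every read-modify-write below simply runs with interrupts disabled
 * via local_irq_save()/local_irq_restore().
 */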

typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
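
/*
 * Word-sized loads and stores are naturally atomic on a
 * uniprocessor, so atomic_read() and atomic_set() need no locking.
 */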

static inline int atomic_add_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter + i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}

static inline int atomic_sub_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter - i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}
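
/*
 * Note that unlike the functions above, atomic_clear_mask() operates
 * on a plain unsigned long rather than an atomic_t: it atomically
 * clears in *addr every bit that is set in mask.
 */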
static inline void atomic_clear_mask (unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save (flags);
	*addr &= ~mask;
	local_irq_restore (flags);
}

#endif /* __KERNEL__ */

#define atomic_add(i, v)	atomic_add_return ((i), (v))
#define atomic_sub(i, v)	atomic_sub_return ((i), (v))

#define atomic_dec_return(v)	atomic_sub_return (1, (v))
#define atomic_inc_return(v)	atomic_add_return (1, (v))
#define atomic_inc(v)		atomic_inc_return (v)
#define atomic_dec(v)		atomic_dec_return (v)
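
/*
 * The non-return forms above are just the *_return variants with the
 * result discarded.
 */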

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return (v) == 0)
#define atomic_sub_and_test(i,v)	(atomic_sub_return ((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return (1, (v)) == 0)
#define atomic_add_negative(i,v)	(atomic_add_return ((i), (v)) < 0)
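
/*
 * Typical release-side use is reference counting; a sketch, with
 * hypothetical obj/refcnt/free_obj names:
 *
 *	if (atomic_dec_and_test (&obj->refcnt))
 *		free_obj (obj);
 */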

/*
 * atomic_cmpxchg() returns the previous value of v->counter, and
 * stores new only if that previous value was equal to old.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}
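
/* xchg() itself comes from <asm/system.h>. */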
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))

/*
 * atomic_add_unless() adds a to v unless v's value is u; it returns
 * non-zero iff the addition was performed.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
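
/*
 * atomic_inc_not_zero() is the usual acquire-side counterpart of the
 * atomic_dec_and_test() idiom above; a sketch with a hypothetical
 * obj->refcnt, taking a reference only while the count is non-zero:
 *
 *	if (!atomic_inc_not_zero (&obj->refcnt))
 *		return NULL;
 */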

/* Atomic operations are already serializing on v850 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>

#endif /* __V850_ATOMIC_H__ */