atomic.h 3.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124
  1. /*
  2. * linux/include/asm-arm26/atomic.h
  3. *
  4. * Copyright (c) 1996 Russell King.
  5. * Modified for arm26 by Ian Molton
  6. *
  7. * This program is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License version 2 as
  9. * published by the Free Software Foundation.
  10. *
  11. * Changelog:
  12. * 25-11-2004 IM Updated for 2.6.9
  13. * 27-06-1996 RMK Created
  14. * 13-04-1997 RMK Made functions atomic!
  15. * 07-12-1997 RMK Upgraded for v2.1.
  16. * 26-08-1998 PJB Added #ifdef __KERNEL__
  17. *
  18. * FIXME - its probably worth seeing what these compile into...
  19. */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H
/* ARM26 is uniprocessor-only; everything below relies on that. */
#ifdef CONFIG_SMP
#error SMP is NOT supported
#endif
/* Atomic counter; volatile forces a real memory access on every use. */
typedef struct { volatile int counter; } atomic_t;
/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
#ifdef __KERNEL__
#include <asm/system.h>
/* Direct load/store of the counter. */
#define atomic_read(v) ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))
  31. static inline int atomic_add_return(int i, atomic_t *v)
  32. {
  33. unsigned long flags;
  34. int val;
  35. local_irq_save(flags);
  36. val = v->counter;
  37. v->counter = val += i;
  38. local_irq_restore(flags);
  39. return val;
  40. }
  41. static inline int atomic_sub_return(int i, atomic_t *v)
  42. {
  43. unsigned long flags;
  44. int val;
  45. local_irq_save(flags);
  46. val = v->counter;
  47. v->counter = val -= i;
  48. local_irq_restore(flags);
  49. return val;
  50. }
  51. static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
  52. {
  53. int ret;
  54. unsigned long flags;
  55. local_irq_save(flags);
  56. ret = v->counter;
  57. if (likely(ret == old))
  58. v->counter = new;
  59. local_irq_restore(flags);
  60. return ret;
  61. }
/* Atomically exchange the counter with @new; delegates to the generic xchg(). */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  63. static inline int atomic_add_unless(atomic_t *v, int a, int u)
  64. {
  65. int ret;
  66. unsigned long flags;
  67. local_irq_save(flags);
  68. ret = v->counter;
  69. if (ret != u)
  70. v->counter += a;
  71. local_irq_restore(flags);
  72. return ret != u;
  73. }
/* Increment @v unless it is zero; non-zero result means the increment happened. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
  75. static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
  76. {
  77. unsigned long flags;
  78. local_irq_save(flags);
  79. *addr &= ~mask;
  80. local_irq_restore(flags);
  81. }
  82. #define atomic_add(i, v) (void) atomic_add_return(i, v)
  83. #define atomic_inc(v) (void) atomic_add_return(1, v)
  84. #define atomic_sub(i, v) (void) atomic_sub_return(i, v)
  85. #define atomic_dec(v) (void) atomic_sub_return(1, v)
  86. #define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
  87. #define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
  88. #define atomic_inc_return(v) (atomic_add_return(1, v))
  89. #define atomic_dec_return(v) (atomic_sub_return(1, v))
  90. #define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
  91. /* Atomic operations are already serializing on ARM26 */
  92. #define smp_mb__before_atomic_dec() barrier()
  93. #define smp_mb__after_atomic_dec() barrier()
  94. #define smp_mb__before_atomic_inc() barrier()
  95. #define smp_mb__after_atomic_inc() barrier()
/* Generic helpers built on top of the primitives defined above. */
#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* __ASM_ARM_ATOMIC_H */