/*
 * linux/include/asm-arm26/atomic.h
 *
 * Copyright (c) 1996 Russell King.
 * Modified for arm26 by Ian Molton
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Changelog:
 *  25-11-2004 IM  Updated for 2.6.9
 *  27-06-1996 RMK Created
 *  13-04-1997 RMK Made functions atomic!
 *  07-12-1997 RMK Upgraded for v2.1.
 *  26-08-1998 PJB Added #ifdef __KERNEL__
 *
 * FIXME - its probably worth seeing what these compile into...
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#include <linux/config.h>

/* ARM26 is uniprocessor-only; these IRQ-disable based ops are not SMP safe. */
#ifdef CONFIG_SMP
#error SMP is NOT supported
#endif

/*
 * Atomic counter type.  'volatile' forces the compiler to re-read the
 * value on every access; atomicity itself comes from disabling IRQs
 * around each read-modify-write below.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__
#include <asm/system.h>

/* A plain aligned int load/store is atomic on this architecture. */
#define atomic_read(v)	((v)->counter)
#define atomic_set(v,i)	(((v)->counter) = (i))
  32. static inline int atomic_add_return(int i, atomic_t *v)
  33. {
  34. unsigned long flags;
  35. int val;
  36. local_irq_save(flags);
  37. val = v->counter;
  38. v->counter = val += i;
  39. local_irq_restore(flags);
  40. return val;
  41. }
  42. static inline int atomic_sub_return(int i, atomic_t *v)
  43. {
  44. unsigned long flags;
  45. int val;
  46. local_irq_save(flags);
  47. val = v->counter;
  48. v->counter = val -= i;
  49. local_irq_restore(flags);
  50. return val;
  51. }
  52. static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
  53. {
  54. int ret;
  55. unsigned long flags;
  56. local_irq_save(flags);
  57. ret = v->counter;
  58. if (likely(ret == old))
  59. v->counter = new;
  60. local_irq_restore(flags);
  61. return ret;
  62. }
/* Atomic exchange: delegates to the generic xchg() from <asm/system.h>. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  64. static inline int atomic_add_unless(atomic_t *v, int a, int u)
  65. {
  66. int ret;
  67. unsigned long flags;
  68. local_irq_save(flags);
  69. ret = v->counter;
  70. if (ret != u)
  71. v->counter += a;
  72. local_irq_restore(flags);
  73. return ret != u;
  74. }
/* Increment v unless it is zero; non-zero return means the increment happened. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
  76. static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
  77. {
  78. unsigned long flags;
  79. local_irq_save(flags);
  80. *addr &= ~mask;
  81. local_irq_restore(flags);
  82. }
  83. #define atomic_add(i, v) (void) atomic_add_return(i, v)
  84. #define atomic_inc(v) (void) atomic_add_return(1, v)
  85. #define atomic_sub(i, v) (void) atomic_sub_return(i, v)
  86. #define atomic_dec(v) (void) atomic_sub_return(1, v)
  87. #define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
  88. #define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
  89. #define atomic_inc_return(v) (atomic_add_return(1, v))
  90. #define atomic_dec_return(v) (atomic_sub_return(1, v))
  91. #define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
/*
 * Atomic operations are already serializing on ARM26 (UP, implemented by
 * disabling IRQs), so these barriers only need to stop compiler reordering.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>

#endif /* __KERNEL__ */
#endif /* __ASM_ARM_ATOMIC_H */