/*
 * local.h — i386 CPU-local atomic counter operations (local_t).
 */
#ifndef _ARCH_I386_LOCAL_H
#define _ARCH_I386_LOCAL_H

#include <linux/percpu.h>
  4. typedef struct
  5. {
  6. volatile long counter;
  7. } local_t;
  8. #define LOCAL_INIT(i) { (i) }
  9. #define local_read(v) ((v)->counter)
  10. #define local_set(v,i) (((v)->counter) = (i))
  11. static __inline__ void local_inc(local_t *v)
  12. {
  13. __asm__ __volatile__(
  14. "incl %0"
  15. :"+m" (v->counter));
  16. }
  17. static __inline__ void local_dec(local_t *v)
  18. {
  19. __asm__ __volatile__(
  20. "decl %0"
  21. :"+m" (v->counter));
  22. }
  23. static __inline__ void local_add(long i, local_t *v)
  24. {
  25. __asm__ __volatile__(
  26. "addl %1,%0"
  27. :"+m" (v->counter)
  28. :"ir" (i));
  29. }
  30. static __inline__ void local_sub(long i, local_t *v)
  31. {
  32. __asm__ __volatile__(
  33. "subl %1,%0"
  34. :"+m" (v->counter)
  35. :"ir" (i));
  36. }
  37. /* On x86, these are no better than the atomic variants. */
  38. #define __local_inc(l) local_inc(l)
  39. #define __local_dec(l) local_dec(l)
  40. #define __local_add(i,l) local_add((i),(l))
  41. #define __local_sub(i,l) local_sub((i),(l))
  42. /* Use these for per-cpu local_t variables: on some archs they are
  43. * much more efficient than these naive implementations. Note they take
  44. * a variable, not an address.
  45. */
  46. /* Need to disable preemption for the cpu local counters otherwise we could
  47. still access a variable of a previous CPU in a non atomic way. */
  48. #define cpu_local_wrap_v(v) \
  49. ({ local_t res__; \
  50. preempt_disable(); \
  51. res__ = (v); \
  52. preempt_enable(); \
  53. res__; })
  54. #define cpu_local_wrap(v) \
  55. ({ preempt_disable(); \
  56. v; \
  57. preempt_enable(); }) \
  58. #define cpu_local_read(v) cpu_local_wrap_v(local_read(&__get_cpu_var(v)))
  59. #define cpu_local_set(v, i) cpu_local_wrap(local_set(&__get_cpu_var(v), (i)))
  60. #define cpu_local_inc(v) cpu_local_wrap(local_inc(&__get_cpu_var(v)))
  61. #define cpu_local_dec(v) cpu_local_wrap(local_dec(&__get_cpu_var(v)))
  62. #define cpu_local_add(i, v) cpu_local_wrap(local_add((i), &__get_cpu_var(v)))
  63. #define cpu_local_sub(i, v) cpu_local_wrap(local_sub((i), &__get_cpu_var(v)))
  64. #define __cpu_local_inc(v) cpu_local_inc(v)
  65. #define __cpu_local_dec(v) cpu_local_dec(v)
  66. #define __cpu_local_add(i, v) cpu_local_add((i), (v))
  67. #define __cpu_local_sub(i, v) cpu_local_sub((i), (v))
  68. #endif /* _ARCH_I386_LOCAL_H */