percpu.h

#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
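
/*
 * Illustrative sketch (not part of this header): boot code fills
 * __per_cpu_offset[] with the distance between the linked
 * .data..percpu section and each CPU's private copy of it, roughly:
 *
 *	for_each_possible_cpu(cpu)
 *		__per_cpu_offset[cpu] = (unsigned long)cpu_copy_base(cpu) -
 *					(unsigned long)__per_cpu_start;
 *
 * cpu_copy_base() is a hypothetical name for wherever the arch placed
 * that CPU's copy; the real assignment lives in the arch code or in
 * mm/percpu.c, not here.
 */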

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif

#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
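
/*
 * The two flavours above differ only in how the CPU number is read.
 * With CONFIG_DEBUG_PREEMPT, my_cpu_offset goes through
 * smp_processor_id(), which warns if the caller could still be
 * migrated to another CPU. A hedged sketch of the difference, using a
 * hypothetical percpu variable some_var:
 *
 *	ptr = this_cpu_ptr(&some_var);	// debug build may warn here
 *	preempt_disable();
 *	ptr = this_cpu_ptr(&some_var);	// safe: CPU cannot change
 *	preempt_enable();
 */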

/*
 * Add an offset to a pointer but keep the pointer as is.
 *
 * Only S390 provides its own means of moving the pointer.
 */
#ifndef SHIFT_PERCPU_PTR
/* Weird cast keeps both GCC and sparse happy. */
#define SHIFT_PERCPU_PTR(__p, __offset) ({				\
	__verify_pcpu_ptr((__p));					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
})
#endif
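
/*
 * Conceptually, SHIFT_PERCPU_PTR() is just pointer arithmetic that the
 * compiler cannot see through, roughly equivalent to (illustrative
 * only; RELOC_HIDE() exists to defeat compiler assumptions about the
 * symbol's address):
 *
 *	(typeof(__p))((unsigned long)(__p) + (__offset))
 *
 * __verify_pcpu_ptr() is a compile-time (sparse) check that the
 * argument really is a percpu pointer; it generates no code.
 */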

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu)))
#define __get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&(var), my_cpu_offset))
#define __raw_get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&(var), __my_cpu_offset))

#define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
#define __this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
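
/*
 * Usage sketch (illustrative only, assuming a hypothetical percpu
 * counter named hits):
 *
 *	DEFINE_PER_CPU(unsigned long, hits);
 *
 *	__get_cpu_var(hits)++;		// caller must hold off preemption
 *
 *	unsigned long total = 0;
 *	int cpu;
 *
 *	for_each_possible_cpu(cpu)	// sum every CPU's instance
 *		total += per_cpu(hits, cpu);
 */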

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#else /* ! SMP */

#define VERIFY_PERCPU_PTR(__p) ({			\
	__verify_pcpu_ptr((__p));			\
	(typeof(*(__p)) __kernel __force *)(__p);	\
})

#define per_cpu(var, cpu)	(*((void)(cpu), VERIFY_PERCPU_PTR(&(var))))
#define __get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define __raw_get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define this_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define __this_cpu_ptr(ptr)	this_cpu_ptr(ptr)

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif
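
/*
 * These strings end up in the section attribute that DEFINE_PER_CPU()
 * and friends (see linux/percpu-defs.h) paste together, e.g. on SMP:
 *
 *	DEFINE_PER_CPU(int, foo);
 *	// roughly: __attribute__((section(".data..percpu"))) int foo;
 *
 *	DEFINE_PER_CPU_SHARED_ALIGNED(int, bar);
 *	// roughly: section ".data..percpu..shared_aligned", cacheline
 *	// aligned so bar does not falsely share with other variables
 *
 * The expansions above are paraphrased; the exact attributes live in
 * linux/percpu-defs.h.
 */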

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */