/* arch/alpha/include/asm/percpu.h — Alpha per-CPU variable support */
  1. #ifndef __ALPHA_PERCPU_H
  2. #define __ALPHA_PERCPU_H
  3. #include <linux/compiler.h>
  4. #include <linux/threads.h>
  5. #include <linux/percpu-defs.h>
  6. /*
  7. * Determine the real variable name from the name visible in the
  8. * kernel sources.
  9. */
  10. #define per_cpu_var(var) per_cpu__##var
  11. #ifdef CONFIG_SMP
  12. /*
  13. * per_cpu_offset() is the offset that has to be added to a
  14. * percpu variable to get to the instance for a certain processor.
  15. */
  16. extern unsigned long __per_cpu_offset[NR_CPUS];
  17. #define per_cpu_offset(x) (__per_cpu_offset[x])
  18. #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
  19. #ifdef CONFIG_DEBUG_PREEMPT
  20. #define my_cpu_offset per_cpu_offset(smp_processor_id())
  21. #else
  22. #define my_cpu_offset __my_cpu_offset
  23. #endif
  24. #ifndef MODULE
  25. #define SHIFT_PERCPU_PTR(var, offset) RELOC_HIDE(&per_cpu_var(var), (offset))
  26. #else
  27. /*
  28. * To calculate addresses of locally defined variables, GCC uses 32-bit
  29. * displacement from the GP. Which doesn't work for per cpu variables in
  30. * modules, as an offset to the kernel per cpu area is way above 4G.
  31. *
  32. * This forces allocation of a GOT entry for per cpu variable using
  33. * ldq instruction with a 'literal' relocation.
  34. */
  35. #define SHIFT_PERCPU_PTR(var, offset) ({ \
  36. extern int simple_identifier_##var(void); \
  37. unsigned long __ptr, tmp_gp; \
  38. asm ( "br %1, 1f \n\
  39. 1: ldgp %1, 0(%1) \n\
  40. ldq %0, per_cpu__" #var"(%1)\t!literal" \
  41. : "=&r"(__ptr), "=&r"(tmp_gp)); \
  42. (typeof(&per_cpu_var(var)))(__ptr + (offset)); })
  43. #endif /* MODULE */
  44. /*
  45. * A percpu variable may point to a discarded regions. The following are
  46. * established ways to produce a usable pointer from the percpu variable
  47. * offset.
  48. */
  49. #define per_cpu(var, cpu) \
  50. (*SHIFT_PERCPU_PTR(var, per_cpu_offset(cpu)))
  51. #define __get_cpu_var(var) \
  52. (*SHIFT_PERCPU_PTR(var, my_cpu_offset))
  53. #define __raw_get_cpu_var(var) \
  54. (*SHIFT_PERCPU_PTR(var, __my_cpu_offset))
  55. #else /* ! SMP */
  56. #define per_cpu(var, cpu) (*((void)(cpu), &per_cpu_var(var)))
  57. #define __get_cpu_var(var) per_cpu_var(var)
  58. #define __raw_get_cpu_var(var) per_cpu_var(var)
  59. #endif /* SMP */
  60. #ifdef CONFIG_SMP
  61. #define PER_CPU_BASE_SECTION ".data.percpu"
  62. #else
  63. #define PER_CPU_BASE_SECTION ".data"
  64. #endif
  65. #ifdef CONFIG_SMP
  66. #ifdef MODULE
  67. #define PER_CPU_SHARED_ALIGNED_SECTION ""
  68. #else
  69. #define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
  70. #endif
  71. #define PER_CPU_FIRST_SECTION ".first"
  72. #else
  73. #define PER_CPU_SHARED_ALIGNED_SECTION ""
  74. #define PER_CPU_FIRST_SECTION ""
  75. #endif
  76. #define PER_CPU_ATTRIBUTES
  77. #endif /* __ALPHA_PERCPU_H */