percpu.h
#ifndef __ALPHA_PERCPU_H
#define __ALPHA_PERCPU_H

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

/*
 * Determine the real variable name from the name visible in the
 * kernel sources.
 */
#define per_cpu_var(var) per_cpu__##var
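/*
 * For example, per_cpu_var(foo) expands to the linker-visible symbol
 * per_cpu__foo, which is the object that DEFINE_PER_CPU(type, foo)
 * actually defines (foo here is just an illustrative name).
 */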
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 */
extern unsigned long __per_cpu_offset[NR_CPUS];
#define per_cpu_offset(x) (__per_cpu_offset[x])
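/*
 * Illustration: the CPU 3 instance of a percpu variable lives at
 * (char *)&per_cpu__var + __per_cpu_offset[3]; SHIFT_PERCPU_PTR()
 * below performs exactly that addition, hidden from the compiler.
 */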
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

#ifndef MODULE
#define SHIFT_PERCPU_PTR(var, offset) RELOC_HIDE(&per_cpu_var(var), (offset))
#define PER_CPU_ATTRIBUTES
#else
/*
 * To calculate addresses of locally defined variables, GCC uses a 32-bit
 * displacement from the GP, which doesn't work for per cpu variables in
 * modules, as an offset to the kernel per cpu area is way above 4G.
 *
 * This forces allocation of a GOT entry for the per cpu variable using
 * an ldq instruction with a 'literal' relocation.
 */
#define SHIFT_PERCPU_PTR(var, offset) ({			\
	extern int simple_identifier_##var(void);		\
	unsigned long __ptr, tmp_gp;				\
	asm (	"br	%1, 1f					\n\
	1:	ldgp	%1, 0(%1)				\n\
		ldq	%0, per_cpu__" #var "(%1)\t!literal"	\
		: "=&r"(__ptr), "=&r"(tmp_gp));			\
	(typeof(&per_cpu_var(var)))(__ptr + (offset)); })
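/*
 * The br/ldgp pair recomputes the module's GP into tmp_gp, and the ldq
 * with the !literal relocation then fetches the full 64-bit address of
 * per_cpu__<var> from the GOT, sidestepping the 32-bit GP displacement
 * limit described above.
 */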
#define PER_CPU_ATTRIBUTES	__used

#endif /* MODULE */

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(var, per_cpu_offset(cpu)))
#define __get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(var, my_cpu_offset))
#define __raw_get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(var, __my_cpu_offset))
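/*
 * Usage sketch, assuming a variable defined elsewhere with
 * DEFINE_PER_CPU(int, foo):
 *
 *	per_cpu(foo, cpu) = 0;		 a specific CPU's instance
 *	__get_cpu_var(foo)++;		 this CPU's instance, preemption off
 *	__raw_get_cpu_var(foo)++;	 this CPU's instance, no preempt check
 */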
#else /* ! SMP */

#define per_cpu(var, cpu)		(*((void)(cpu), &per_cpu_var(var)))
#define __get_cpu_var(var)		per_cpu_var(var)
#define __raw_get_cpu_var(var)		per_cpu_var(var)

#define PER_CPU_ATTRIBUTES

#endif /* SMP */

#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data.percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION ".first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_FIRST_SECTION ""

#endif
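/*
 * linux/percpu-defs.h pastes these strings together to form the section
 * name, so on SMP a DEFINE_PER_CPU_SHARED_ALIGNED() variable built into
 * the kernel should land in ".data.percpu.shared_aligned", while module
 * code (and UP builds) falls back to the base section.
 */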
#define PER_CPU_ATTRIBUTES

#endif /* __ALPHA_PERCPU_H */