asm-compat.h — PowerPC 32/64-bit assembly compatibility macros (3.2 KB)
  1. #ifndef _ASM_POWERPC_ASM_COMPAT_H
  2. #define _ASM_POWERPC_ASM_COMPAT_H
  3. #include <asm/types.h>
  4. #ifdef __ASSEMBLY__
  5. # define stringify_in_c(...) __VA_ARGS__
  6. # define ASM_CONST(x) x
  7. #else
  8. /* This version of stringify will deal with commas... */
  9. # define __stringify_in_c(...) #__VA_ARGS__
  10. # define stringify_in_c(...) __stringify_in_c(__VA_ARGS__) " "
  11. # define __ASM_CONST(x) x##UL
  12. # define ASM_CONST(x) __ASM_CONST(x)
  13. #endif
  14. /*
  15. * Feature section common macros
  16. *
  17. * Note that the entries now contain offsets between the table entry
  18. * and the code rather than absolute code pointers in order to be
  19. * useable with the vdso shared library. There is also an assumption
  20. * that values will be negative, that is, the fixup table has to be
  21. * located after the code it fixes up.
  22. */
  23. #ifdef CONFIG_PPC64
  24. #ifdef __powerpc64__
  25. /* 64 bits kernel, 64 bits code */
  26. #define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect) \
  27. 99: \
  28. .section sect,"a"; \
  29. .align 3; \
  30. 98: \
  31. .llong msk; \
  32. .llong val; \
  33. .llong label##b-98b; \
  34. .llong 99b-98b; \
  35. .previous
  36. #else /* __powerpc64__ */
  37. /* 64 bits kernel, 32 bits code (ie. vdso32) */
  38. #define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect) \
  39. 99: \
  40. .section sect,"a"; \
  41. .align 3; \
  42. 98: \
  43. .llong msk; \
  44. .llong val; \
  45. .long 0xffffffff; \
  46. .long label##b-98b; \
  47. .long 0xffffffff; \
  48. .long 99b-98b; \
  49. .previous
  50. #endif /* !__powerpc64__ */
  51. #else /* CONFIG_PPC64 */
  52. /* 32 bits kernel, 32 bits code */
  53. #define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect) \
  54. 99: \
  55. .section sect,"a"; \
  56. .align 2; \
  57. 98: \
  58. .long msk; \
  59. .long val; \
  60. .long label##b-98b; \
  61. .long 99b-98b; \
  62. .previous
  63. #endif /* !CONFIG_PPC64 */
  64. #ifdef __powerpc64__
  65. /* operations for longs and pointers */
  66. #define PPC_LL stringify_in_c(ld)
  67. #define PPC_STL stringify_in_c(std)
  68. #define PPC_LCMPI stringify_in_c(cmpdi)
  69. #define PPC_LONG stringify_in_c(.llong)
  70. #define PPC_TLNEI stringify_in_c(tdnei)
  71. #define PPC_LLARX stringify_in_c(ldarx)
  72. #define PPC_STLCX stringify_in_c(stdcx.)
  73. #define PPC_CNTLZL stringify_in_c(cntlzd)
  74. /* Move to CR, single-entry optimized version. Only available
  75. * on POWER4 and later.
  76. */
  77. #ifdef CONFIG_POWER4_ONLY
  78. #define PPC_MTOCRF stringify_in_c(mtocrf)
  79. #else
  80. #define PPC_MTOCRF stringify_in_c(mtcrf)
  81. #endif
  82. #else /* 32-bit */
  83. /* operations for longs and pointers */
  84. #define PPC_LL stringify_in_c(lwz)
  85. #define PPC_STL stringify_in_c(stw)
  86. #define PPC_LCMPI stringify_in_c(cmpwi)
  87. #define PPC_LONG stringify_in_c(.long)
  88. #define PPC_TLNEI stringify_in_c(twnei)
  89. #define PPC_LLARX stringify_in_c(lwarx)
  90. #define PPC_STLCX stringify_in_c(stwcx.)
  91. #define PPC_CNTLZL stringify_in_c(cntlzw)
  92. #define PPC_MTOCRF stringify_in_c(mtcrf)
  93. #endif
  94. #ifdef __KERNEL__
  95. #ifdef CONFIG_IBM405_ERR77
  96. /* Erratum #77 on the 405 means we need a sync or dcbt before every
  97. * stwcx. The old ATOMIC_SYNC_FIX covered some but not all of this.
  98. */
  99. #define PPC405_ERR77(ra,rb) stringify_in_c(dcbt ra, rb;)
  100. #define PPC405_ERR77_SYNC stringify_in_c(sync;)
  101. #else
  102. #define PPC405_ERR77(ra,rb)
  103. #define PPC405_ERR77_SYNC
  104. #endif
  105. #endif
  106. #endif /* _ASM_POWERPC_ASM_COMPAT_H */