#ifndef __ASM_MSR_H
#define __ASM_MSR_H

#include <asm/msr-index.h>

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/errno.h>

static inline unsigned long long native_read_msr(unsigned int msr)
{
	unsigned long long val;

	/* "=A" places the 64-bit result in the EDX:EAX register pair */
	asm volatile("rdmsr" : "=A" (val) : "c" (msr));
	return val;
}
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	unsigned long long val;

	asm volatile("2: rdmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long 2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), "=A" (val)
		     : "c" (msr), "i" (-EFAULT));
	return val;
}
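
/*
 * How the _safe variants recover: rdmsr/wrmsr on a non-existent MSR
 * raises #GP.  The __ex_table entry maps the faulting instruction
 * (label 2:) to the fixup code (label 3:), which loads -EFAULT into
 * the error output and resumes at label 1:, bypassing the xorl that
 * clears the error output on success.
 */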

static inline void native_write_msr(unsigned int msr, unsigned long long val)
{
	asm volatile("wrmsr" : : "c" (msr), "A" (val));
}

static inline int native_write_msr_safe(unsigned int msr,
					unsigned long long val)
{
	int err;

	asm volatile("2: wrmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long 2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" ((u32)val), "d" ((u32)(val >> 32)),
		       "i" (-EFAULT));
	return err;
}

static inline unsigned long long native_read_tsc(void)
{
	unsigned long long val;

	asm volatile("rdtsc" : "=A" (val));
	return val;
}

static inline unsigned long long native_read_pmc(int counter)
{
	unsigned long long val;

	/* rdpmc reads the performance counter selected by ECX, so the
	   counter number must be passed in explicitly */
	asm volatile("rdpmc" : "=A" (val) : "c" (counter));
	return val;
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>

/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the output parameters directly (without
 * using pointer indirection); this lets gcc optimize better.
 */

#define rdmsr(msr,val1,val2)						\
	do {								\
		unsigned long long __val = native_read_msr(msr);	\
		(val1) = (u32)__val;					\
		(val2) = (u32)(__val >> 32);				\
	} while(0)

#define wrmsr(msr,val1,val2)						\
	native_write_msr(msr, ((unsigned long long)(val2) << 32) | (u32)(val1))
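
/*
 * Usage sketch (assumes MSR_IA32_APICBASE from <asm/msr-index.h>); note
 * that val1/val2 are plain lvalues, not pointers:
 *
 *	u32 lo, hi;
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);
 *	wrmsr(MSR_IA32_APICBASE, lo, hi);
 */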

#define rdmsrl(msr,val)							\
	do {								\
		(val) = native_read_msr(msr);				\
	} while(0)

static inline void wrmsrl(unsigned long msr, unsigned long long val)
{
	unsigned long lo, hi;

	lo = (unsigned long)val;
	hi = val >> 32;
	wrmsr(msr, lo, hi);
}

/* wrmsr with exception handling */
#define wrmsr_safe(msr,val1,val2)					\
	(native_write_msr_safe(msr, ((unsigned long long)(val2) << 32) | (u32)(val1)))

/* rdmsr with exception handling */
#define rdmsr_safe(msr,p1,p2)						\
({									\
	int __err;							\
	unsigned long long __val = native_read_msr_safe(msr, &__err);	\
	(*p1) = (u32)__val;						\
	(*p2) = (u32)(__val >> 32);					\
	__err;								\
})
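
/*
 * The _safe variants return 0 on success and -EFAULT if the MSR does
 * not exist.  A minimal caller sketch (assumes MSR_IA32_PLATFORM_ID
 * from <asm/msr-index.h>):
 *
 *	u32 lo, hi;
 *	if (rdmsr_safe(MSR_IA32_PLATFORM_ID, &lo, &hi))
 *		return -ENODEV;
 *
 * where -ENODEV signals that the MSR is not implemented on this CPU.
 */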

#define rdtscl(low)							\
	do {								\
		(low) = (u32)native_read_tsc();				\
	} while(0)

#define rdtscll(val) ((val) = native_read_tsc())

/* MSR 0x10 is IA32_TIME_STAMP_COUNTER */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define rdpmc(counter,low,high)						\
	do {								\
		u64 _l = native_read_pmc(counter);			\
		(low) = (u32)_l;					\
		(high) = (u32)(_l >> 32);				\
	} while(0)
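
/*
 * A minimal timing sketch with the TSC helpers (illustrative only;
 * do_work() is a placeholder for the code being timed, and cycle
 * deltas are meaningful only on one CPU with a constant-rate TSC):
 *
 *	unsigned long long t0, t1;
 *	rdtscll(t0);
 *	do_work();
 *	rdtscll(t1);
 *	printk("%llu cycles\n", t1 - t0);
 */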

#endif	/* !CONFIG_PARAVIRT */

#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}

static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}

static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}

static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif	/* CONFIG_SMP */
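
/*
 * Cross-CPU sketch: the *_on_cpu() helpers perform the access on the
 * given CPU (on UP builds they simply run locally).  Assuming CPU 1 is
 * online:
 *
 *	u32 lo, hi;
 *	rdmsr_on_cpu(1, MSR_IA32_APICBASE, &lo, &hi);
 */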

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* __ASM_MSR_H */