msr_32.h

#ifndef __ASM_MSR_H
#define __ASM_MSR_H

#include <asm/msr-index.h>

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/errno.h>
static inline unsigned long long native_read_msr(unsigned int msr)
{
        unsigned long long val;

        asm volatile("rdmsr" : "=A" (val) : "c" (msr));
        return val;
}
/*
 * rdmsr with exception handling: *err is set to 0 on success and to
 * -EFAULT (via the __ex_table fixup below) if the rdmsr instruction faults.
 */
static inline unsigned long long native_read_msr_safe(unsigned int msr,
                                                       int *err)
{
        unsigned long long val;

        asm volatile("2: rdmsr ; xorl %0,%0\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3: movl %3,%0 ; jmp 1b\n\t"
                     ".previous\n\t"
                     ".section __ex_table,\"a\"\n"
                     "   .align 4\n\t"
                     "   .long 2b,3b\n\t"
                     ".previous"
                     : "=r" (*err), "=A" (val)
                     : "c" (msr), "i" (-EFAULT));

        return val;
}
static inline void native_write_msr(unsigned int msr, unsigned long long val)
{
        asm volatile("wrmsr" : : "c" (msr), "A"(val));
}
/*
 * wrmsr with exception handling: returns 0 on success and -EFAULT (via the
 * __ex_table fixup below) if the wrmsr instruction faults.
 */
static inline int native_write_msr_safe(unsigned int msr,
                                        unsigned long long val)
{
        int err;

        asm volatile("2: wrmsr ; xorl %0,%0\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3: movl %4,%0 ; jmp 1b\n\t"
                     ".previous\n\t"
                     ".section __ex_table,\"a\"\n"
                     "   .align 4\n\t"
                     "   .long 2b,3b\n\t"
                     ".previous"
                     : "=a" (err)
                     : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
                       "i" (-EFAULT));
        return err;
}
static inline unsigned long long native_read_tsc(void)
{
        unsigned long long val;

        asm volatile("rdtsc" : "=A" (val));
        return val;
}

/* The performance counter to read is selected by the index passed in %ecx. */
static inline unsigned long long native_read_pmc(int counter)
{
        unsigned long long val;

        asm volatile("rdpmc" : "=A" (val) : "c" (counter));
        return val;
}
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to model-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the output parameters directly (without
 * using pointer indirection); this allows gcc to optimize better.
 */
#define rdmsr(msr,val1,val2)                                    \
        do {                                                    \
                u64 __val = native_read_msr(msr);               \
                (val1) = (u32)__val;                            \
                (val2) = (u32)(__val >> 32);                    \
        } while (0)

static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
{
        native_write_msr(__msr, ((u64)__high << 32) | __low);
}

#define rdmsrl(msr,val)                                         \
        ((val) = native_read_msr(msr))

#define wrmsrl(msr,val) native_write_msr(msr, val)
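
/*
 * Illustrative usage sketch (not part of the original header): the same MSR
 * can be read or written either as two 32-bit halves or as one 64-bit value.
 * MSR_IA32_SYSENTER_CS (from <asm/msr-index.h>) is used only as an example.
 *
 *      u32 lo, hi;
 *      u64 val;
 *
 *      rdmsr(MSR_IA32_SYSENTER_CS, lo, hi);    lo = bits 31:0, hi = bits 63:32
 *      rdmsrl(MSR_IA32_SYSENTER_CS, val);      val = full 64-bit contents
 *      wrmsr(MSR_IA32_SYSENTER_CS, lo, hi);
 *      wrmsrl(MSR_IA32_SYSENTER_CS, val);
 */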
/* wrmsr with exception handling */
static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
{
        return native_write_msr_safe(__msr, ((u64)__high << 32) | __low);
}

/* rdmsr with exception handling */
#define rdmsr_safe(msr,p1,p2)                                   \
({                                                              \
        int __err;                                              \
        u64 __val = native_read_msr_safe(msr, &__err);          \
        (*p1) = (u32)__val;                                     \
        (*p2) = (u32)(__val >> 32);                             \
        __err;                                                  \
})
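
/*
 * Illustrative usage sketch (not part of the original header): probing an MSR
 * that may not be implemented on the running CPU.  A nonzero return value
 * means the access faulted and was turned into -EFAULT by the fixup code;
 * MSR_IA32_PLATFORM_ID is an arbitrary example.
 *
 *      u32 lo, hi;
 *      int err;
 *
 *      err = rdmsr_safe(MSR_IA32_PLATFORM_ID, &lo, &hi);
 *      if (err)
 *              ...                     handle the unreadable MSR
 *      err = wrmsr_safe(MSR_IA32_SYSENTER_CS, lo, hi);
 */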
#define rdtscl(low)                                             \
        ((low) = (u32)native_read_tsc())

#define rdtscll(val)                                            \
        ((val) = native_read_tsc())

#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define rdpmc(counter,low,high)                                 \
        do {                                                    \
                u64 _l = native_read_pmc(counter);              \
                (low) = (u32)_l;                                \
                (high) = (u32)(_l >> 32);                       \
        } while (0)
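
/*
 * Illustrative usage sketch (not part of the original header): timing a short
 * code sequence in TSC cycles.  do_work() and cycles are placeholder names.
 *
 *      unsigned long long t0, t1, cycles;
 *
 *      rdtscll(t0);
 *      do_work();
 *      rdtscll(t1);
 *      cycles = t1 - t0;
 */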
#endif  /* !CONFIG_PARAVIRT */

#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
        rdmsr(msr_no, *l, *h);
}

static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
        wrmsr(msr_no, l, h);
}

static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
        return rdmsr_safe(msr_no, l, h);
}

static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
        return wrmsr_safe(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
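
/*
 * Illustrative usage sketch (not part of the original header): reading an MSR
 * on a particular CPU.  On SMP kernels the *_on_cpu() helpers declared above
 * are implemented out of line and perform the access on the target CPU; on UP
 * kernels the inline fallbacks simply access the local (only) CPU.  CPU 1 and
 * the MSR used here are arbitrary example values.
 *
 *      u32 lo, hi;
 *
 *      rdmsr_on_cpu(1, MSR_IA32_PLATFORM_ID, &lo, &hi);
 */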
#endif  /* !__ASSEMBLY__ */
#endif  /* __KERNEL__ */
#endif  /* __ASM_MSR_H */