msr.h

#ifndef __ASM_MSR_H
#define __ASM_MSR_H

#include <asm/msr-index.h>

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/errno.h>

/*
 * Read the MSR selected by %ecx; rdmsr returns the 64-bit value in
 * %edx:%eax, which the "=A" constraint maps onto a single u64.
 */
static inline unsigned long long native_read_msr(unsigned int msr)
{
	unsigned long long val;

	asm volatile("rdmsr" : "=A" (val) : "c" (msr));
	return val;
}

static inline unsigned long long native_read_msr_safe(unsigned int msr,
						       int *err)
{
	unsigned long long val;

	asm volatile("2: rdmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     " .align 4\n\t"
		     " .long 2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), "=A" (val)
		     : "c" (msr), "i" (-EFAULT));
	return val;
}
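
/*
 * How the _safe variant recovers from a faulting rdmsr (a sketch of the
 * mechanism already encoded in the asm above, not added behaviour):
 * label 2 marks the instruction that may raise #GP; the __ex_table
 * entry ".long 2b,3b" points the kernel's fault handler at the fixup
 * code at label 3, which stores -EFAULT in the error output and jumps
 * back to label 1. On the non-faulting path, "xorl %0,%0" clears the
 * error to 0.
 */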

static inline void native_write_msr(unsigned int msr, unsigned long long val)
{
	asm volatile("wrmsr" : : "c" (msr), "A" (val));
}

static inline int native_write_msr_safe(unsigned int msr,
					unsigned long long val)
{
	int err;

	asm volatile("2: wrmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     " .align 4\n\t"
		     " .long 2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" ((u32)val), "d" ((u32)(val >> 32)),
		       "i" (-EFAULT));
	return err;
}
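
/*
 * Constraint notes for the asm above (descriptive only): wrmsr takes
 * the MSR index in %ecx and the value in %edx:%eax. The "0" ((u32)val)
 * input reuses operand 0 (%eax, which doubles as the error output) for
 * the low half, and "d" supplies the high half in %edx.
 */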

static inline unsigned long long native_read_tsc(void)
{
	unsigned long long val;

	asm volatile("rdtsc" : "=A" (val));
	return val;
}

/*
 * rdpmc reads the performance counter selected by %ecx; the original
 * version here left %ecx unloaded, so the counter argument was silently
 * ignored. Pass it explicitly.
 */
static inline unsigned long long native_read_pmc(int counter)
{
	unsigned long long val;

	asm volatile("rdpmc" : "=A" (val) : "c" (counter));
	return val;
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */
#define rdmsr(msr,val1,val2)						\
	do {								\
		unsigned long long __val = native_read_msr(msr);	\
		(val1) = __val;						\
		(val2) = __val >> 32;					\
	} while(0)

#define wrmsr(msr,val1,val2)						\
	native_write_msr(msr, ((unsigned long long)(val2) << 32) | (val1))

#define rdmsrl(msr,val)							\
	do {								\
		(val) = native_read_msr(msr);				\
	} while(0)

static inline void wrmsrl(unsigned long msr, unsigned long long val)
{
	unsigned long lo, hi;

	lo = (unsigned long)val;
	hi = val >> 32;
	wrmsr(msr, lo, hi);
}
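
/*
 * Usage sketch for the accessors above (MSR_IA32_APICBASE is assumed to
 * come from <asm/msr-index.h>; the call site is hypothetical):
 *
 *	u32 lo, hi;
 *	u64 val;
 *
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);   // value split into 32-bit halves
 *	rdmsrl(MSR_IA32_APICBASE, val);     // whole 64-bit value
 *	wrmsrl(MSR_IA32_APICBASE, val);     // write it back
 */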

/* wrmsr with exception handling */
#define wrmsr_safe(msr,val1,val2)					\
	(native_write_msr_safe(msr, ((unsigned long long)(val2) << 32) | (val1)))

/* rdmsr with exception handling */
#define rdmsr_safe(msr,p1,p2)						\
	({								\
		int __err;						\
		unsigned long long __val = native_read_msr_safe(msr, &__err);\
		(*p1) = __val;						\
		(*p2) = __val >> 32;					\
		__err;							\
	})
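
/*
 * Error-handling sketch: the _safe variants return 0 on success and
 * -EFAULT when the MSR does not exist (the #GP is caught by the fixup
 * entries above). MSR_IA32_PLATFORM_ID is assumed to come from
 * <asm/msr-index.h>; the error code chosen is up to the caller:
 *
 *	u32 lo, hi;
 *
 *	if (rdmsr_safe(MSR_IA32_PLATFORM_ID, &lo, &hi))
 *		return -EIO;
 */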

#define rdtsc(low,high)							\
	do {								\
		u64 _l = native_read_tsc();				\
		(low) = (u32)_l;					\
		(high) = (u32)(_l >> 32);				\
	} while(0)

#define rdtscl(low)							\
	do {								\
		(low) = (u32)native_read_tsc();				\
	} while(0)

#define rdtscll(val) ((val) = native_read_tsc())

/* MSR 0x10 is IA32_TIME_STAMP_COUNTER */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define rdpmc(counter,low,high)						\
	do {								\
		u64 _l = native_read_pmc(counter);			\
		(low) = (u32)_l;					\
		(high) = (u32)(_l >> 32);				\
	} while(0)
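
/*
 * TSC timing sketch: rdtscll() reads the full 64-bit timestamp counter,
 * rdtscl() only its low 32 bits. Purely illustrative; nothing here
 * serializes the instruction stream around the reads:
 *
 *	unsigned long long t1, t2, elapsed_cycles;
 *
 *	rdtscll(t1);
 *	... code being timed ...
 *	rdtscll(t2);
 *	elapsed_cycles = t2 - t1;
 */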

#endif /* !CONFIG_PARAVIRT */

#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
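
/*
 * On SMP these helpers perform the MSR access on the target CPU (via a
 * cross-CPU call in the out-of-line implementation, which lives
 * elsewhere); on UP they fall through to the local access below.
 * Illustrative call, with MSR_IA32_UCODE_REV assumed from
 * <asm/msr-index.h>:
 *
 *	u32 lo, hi;
 *
 *	rdmsr_on_cpu(1, MSR_IA32_UCODE_REV, &lo, &hi);
 */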

#else /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}

static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
#endif /* CONFIG_SMP */

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* __ASM_MSR_H */