@@ -17,6 +17,7 @@
 #include <asm/time.h>
 #include <asm/delay.h>
 #include <asm/hypervisor.h>
+#include <asm/nmi.h>
 
 unsigned int __read_mostly cpu_khz;	/* TSC clocks / usec, not used here */
 EXPORT_SYMBOL(cpu_khz);
@@ -852,6 +853,60 @@ static void __init init_tsc_clocksource(void)
 	clocksource_register(&clocksource_tsc);
 }
 
+#ifdef CONFIG_X86_64
+/*
+ * calibrate_cpu is used on systems with fixed rate TSCs to determine
+ * processor frequency
+ */
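+/* number of TSC ticks over which the core-clock counter is sampled */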
+#define TICK_COUNT 100000000
+static unsigned long __init calibrate_cpu(void)
+{
+	int tsc_start, tsc_now;
+	int i, no_ctr_free;
+	unsigned long evntsel3 = 0, pmc3 = 0, pmc_now = 0;
+	unsigned long flags;
+
+	for (i = 0; i < 4; i++)
+		if (avail_to_resrv_perfctr_nmi_bit(i))
+			break;
+	no_ctr_free = (i == 4);
+	if (no_ctr_free) {
+		WARN(1, KERN_WARNING "Warning: AMD perfctrs busy ... "
+		     "cpu_khz value may be incorrect.\n");
+		i = 3;
+		rdmsrl(MSR_K7_EVNTSEL3, evntsel3);
+		wrmsrl(MSR_K7_EVNTSEL3, 0);
+		rdmsrl(MSR_K7_PERFCTR3, pmc3);
+	} else {
+		reserve_perfctr_nmi(MSR_K7_PERFCTR0 + i);
+		reserve_evntsel_nmi(MSR_K7_EVNTSEL0 + i);
+	}
+	local_irq_save(flags);
+	/* start measuring cycles, incrementing from 0 */
+	wrmsrl(MSR_K7_PERFCTR0 + i, 0);
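+	/* event 0x76: CPU clocks not halted; 3 << 16: user+kernel; 1 << 22: enable */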
+	wrmsrl(MSR_K7_EVNTSEL0 + i, 1 << 22 | 3 << 16 | 0x76);
+	rdtscl(tsc_start);
+	do {
+		rdmsrl(MSR_K7_PERFCTR0 + i, pmc_now);
+		tsc_now = get_cycles();
+	} while ((tsc_now - tsc_start) < TICK_COUNT);
+
+	local_irq_restore(flags);
+	if (no_ctr_free) {
+		wrmsrl(MSR_K7_EVNTSEL3, 0);
+		wrmsrl(MSR_K7_PERFCTR3, pmc3);
+		wrmsrl(MSR_K7_EVNTSEL3, evntsel3);
+	} else {
+		release_perfctr_nmi(MSR_K7_PERFCTR0 + i);
+		release_evntsel_nmi(MSR_K7_EVNTSEL0 + i);
+	}
+
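+	/* core cycles counted * tsc_khz / TSC ticks elapsed = CPU frequency in kHz */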
+	return pmc_now * tsc_khz / (tsc_now - tsc_start);
+}
+#else
+static inline unsigned long calibrate_cpu(void) { return cpu_khz; }
+#endif
+
 void __init tsc_init(void)
 {
 	u64 lpj;
@@ -870,11 +925,9 @@ void __init tsc_init(void)
 		return;
 	}
 
-#ifdef CONFIG_X86_64
 	if (cpu_has(&boot_cpu_data, X86_FEATURE_CONSTANT_TSC) &&
 			(boot_cpu_data.x86_vendor == X86_VENDOR_AMD))
 		cpu_khz = calibrate_cpu();
-#endif
 
 	printk("Detected %lu.%03lu MHz processor.\n",
 		(unsigned long)cpu_khz / 1000,