@@ -1,24 +1,25 @@
-#include <linux/mm.h>
 #include <linux/init.h>
-#include <asm/io.h>
-#include <asm/mtrr.h>
-#include <asm/msr.h>
+#include <linux/io.h>
+#include <linux/mm.h>
+
 #include <asm/processor-cyrix.h>
 #include <asm/processor-flags.h>
-#include "mtrr.h"
+#include <asm/mtrr.h>
+#include <asm/msr.h>
 
+#include "mtrr.h"
 
-/* Put the processor into a state where MTRRs can be safely set */
+/* Put the processor into a state where MTRRs can be safely set */
 void set_mtrr_prepare_save(struct set_mtrr_context *ctxt)
 {
 	unsigned int cr0;
 
-	/* Disable interrupts locally */
+	/* Disable interrupts locally */
 	local_irq_save(ctxt->flags);
 
 	if (use_intel() || is_cpu(CYRIX)) {
 
-		/* Save value of CR4 and clear Page Global Enable (bit 7) */
+		/* Save value of CR4 and clear Page Global Enable (bit 7) */
 		if (cpu_has_pge) {
 			ctxt->cr4val = read_cr4();
 			write_cr4(ctxt->cr4val & ~X86_CR4_PGE);
@@ -33,50 +34,61 @@ void set_mtrr_prepare_save(struct set_mtrr_context *ctxt)
 		write_cr0(cr0);
 		wbinvd();
 
-		if (use_intel())
-			/* Save MTRR state */
+		if (use_intel()) {
+			/* Save MTRR state */
 			rdmsr(MSR_MTRRdefType, ctxt->deftype_lo, ctxt->deftype_hi);
-		else
-			/* Cyrix ARRs - everything else were excluded at the top */
+		} else {
+			/*
+			 * Cyrix ARRs -
+			 * everything else were excluded at the top
+			 */
 			ctxt->ccr3 = getCx86(CX86_CCR3);
+		}
 	}
 }
 
 void set_mtrr_cache_disable(struct set_mtrr_context *ctxt)
 {
-	if (use_intel())
-		/* Disable MTRRs, and set the default type to uncached */
+	if (use_intel()) {
+		/* Disable MTRRs, and set the default type to uncached */
 		mtrr_wrmsr(MSR_MTRRdefType, ctxt->deftype_lo & 0xf300UL,
 			   ctxt->deftype_hi);
-	else if (is_cpu(CYRIX))
-		/* Cyrix ARRs - everything else were excluded at the top */
-		setCx86(CX86_CCR3, (ctxt->ccr3 & 0x0f) | 0x10);
+	} else {
+		if (is_cpu(CYRIX)) {
+			/* Cyrix ARRs - everything else were excluded at the top */
+			setCx86(CX86_CCR3, (ctxt->ccr3 & 0x0f) | 0x10);
+		}
+	}
 }
 
-/* Restore the processor after a set_mtrr_prepare */
+/* Restore the processor after a set_mtrr_prepare */
 void set_mtrr_done(struct set_mtrr_context *ctxt)
 {
 	if (use_intel() || is_cpu(CYRIX)) {
 
-		/* Flush caches and TLBs */
+		/* Flush caches and TLBs */
 		wbinvd();
 
-		/* Restore MTRRdefType */
-		if (use_intel())
+		/* Restore MTRRdefType */
+		if (use_intel()) {
 			/* Intel (P6) standard MTRRs */
-			mtrr_wrmsr(MSR_MTRRdefType, ctxt->deftype_lo, ctxt->deftype_hi);
-		else
-			/* Cyrix ARRs - everything else was excluded at the top */
+			mtrr_wrmsr(MSR_MTRRdefType, ctxt->deftype_lo,
+				   ctxt->deftype_hi);
+		} else {
+			/*
+			 * Cyrix ARRs -
+			 * everything else was excluded at the top
+			 */
 			setCx86(CX86_CCR3, ctxt->ccr3);
+		}
 
-		/* Enable caches */
+		/* Enable caches */
 		write_cr0(read_cr0() & 0xbfffffff);
 
-		/* Restore value of CR4 */
+		/* Restore value of CR4 */
 		if (cpu_has_pge)
 			write_cr4(ctxt->cr4val);
 	}
-	/* Re-enable interrupts locally (if enabled previously) */
+	/* Re-enable interrupts locally (if enabled previously) */
 	local_irq_restore(ctxt->flags);
 }
-