state.c

#include <linux/mm.h>
#include <linux/init.h>
#include <asm/io.h>
#include <asm/mtrr.h>
#include <asm/msr.h>
#include <asm-i386/processor-cyrix.h>
#include "mtrr.h"

/* Put the processor into a state where MTRRs can be safely set */
void set_mtrr_prepare_save(struct set_mtrr_context *ctxt)
{
        unsigned int cr0;

        /* Disable interrupts locally */
        local_irq_save(ctxt->flags);

        if (use_intel() || is_cpu(CYRIX)) {
                /* Save value of CR4 and clear Page Global Enable (bit 7) */
                if (cpu_has_pge) {
                        ctxt->cr4val = read_cr4();
                        write_cr4(ctxt->cr4val & ~X86_CR4_PGE);
                }

                /* Disable and flush caches. Note that wbinvd flushes the TLBs
                   as a side-effect */
                cr0 = read_cr0() | 0x40000000;  /* set CR0.CD (cache disable) */
                wbinvd();
                write_cr0(cr0);
                wbinvd();

                if (use_intel())
                        /* Save MTRR state */
                        rdmsr(MTRRdefType_MSR, ctxt->deftype_lo, ctxt->deftype_hi);
                else
                        /* Cyrix ARRs - everything else was excluded at the top */
                        ctxt->ccr3 = getCx86(CX86_CCR3);
        }
}

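/* Disable MTRR-based caching while the ranges are being rewritten: on Intel
   the MTRRs are switched off with an uncached default type; on Cyrix, MAPEN
   is set in CCR3 so the ARR configuration registers become accessible */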
void set_mtrr_cache_disable(struct set_mtrr_context *ctxt)
{
        if (use_intel())
                /* Disable MTRRs, and set the default type to uncached */
                mtrr_wrmsr(MTRRdefType_MSR, ctxt->deftype_lo & 0xf300UL,
                           ctxt->deftype_hi);
        else if (is_cpu(CYRIX))
                /* Cyrix ARRs - everything else was excluded at the top */
                setCx86(CX86_CCR3, (ctxt->ccr3 & 0x0f) | 0x10);  /* enable MAPEN */
}

/* Restore the processor after a set_mtrr_prepare */
void set_mtrr_done(struct set_mtrr_context *ctxt)
{
        if (use_intel() || is_cpu(CYRIX)) {
                /* Flush caches and TLBs */
                wbinvd();

                /* Restore MTRRdefType */
                if (use_intel())
                        /* Intel (P6) standard MTRRs */
                        mtrr_wrmsr(MTRRdefType_MSR, ctxt->deftype_lo,
                                   ctxt->deftype_hi);
                else
                        /* Cyrix ARRs - everything else was excluded at the top */
                        setCx86(CX86_CCR3, ctxt->ccr3);

                /* Enable caches */
                write_cr0(read_cr0() & 0xbfffffff);  /* clear CR0.CD */

                /* Restore value of CR4 */
                if (cpu_has_pge)
                        write_cr4(ctxt->cr4val);
        }

        /* Re-enable interrupts locally (if enabled previously) */
        local_irq_restore(ctxt->flags);
}
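
/*
 * Usage sketch (illustrative, not part of the original file): the three
 * helpers above are meant to bracket the actual MTRR/ARR register writes on
 * the CPU performing the update. The function name and the index/base/size/
 * type parameters below are hypothetical placeholders.
 */
#if 0
static void example_update_one_range(unsigned int index, unsigned long base,
                                     unsigned long size, mtrr_type type)
{
        struct set_mtrr_context ctxt;

        set_mtrr_prepare_save(&ctxt);   /* IRQs off, caches flushed and disabled */
        set_mtrr_cache_disable(&ctxt);  /* MTRRs off / Cyrix MAPEN raised */

        /* ... program the variable-range MTRRs or Cyrix ARRs here ... */

        set_mtrr_done(&ctxt);           /* restore deftype/CCR3, caches, CR4, IRQs */
}
#endif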