feature-fixups.c

/*
 *  Copyright (C) 2001 Ben. Herrenschmidt (benh@kernel.crashing.org)
 *
 *  Modifications for ppc64:
 *      Copyright (C) 2003 Dave Engebretsen <engebret@us.ibm.com>
 *
 *  Copyright 2008 Michael Ellerman, IBM Corporation.
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License
 *  as published by the Free Software Foundation; either version
 *  2 of the License, or (at your option) any later version.
 */
#include <linux/kernel.h>
#include <asm/cputable.h>
#include <asm/code-patching.h>

/*
 * Layout of one feature fixup entry; the offsets are relative to the
 * entry itself.
 */
struct fixup_entry {
	unsigned long	mask;		/* feature bits this entry tests */
	unsigned long	value;		/* value required for the code to stay */
	long		start_off;	/* start of the feature-dependent code */
	long		end_off;	/* end of the feature-dependent code */
	long		alt_start_off;	/* alternative instructions (not used here) */
	long		alt_end_off;
};

static void patch_feature_section(unsigned long value, struct fixup_entry *fcur)
{
	unsigned int *pstart, *pend, *p;

	/* If the CPU has the required feature bits, leave the code alone. */
	if ((value & fcur->mask) == fcur->value)
		return;

	pstart = ((unsigned int *)fcur) + (fcur->start_off / 4);
	pend = ((unsigned int *)fcur) + (fcur->end_off / 4);

	/* Overwrite the section with nops, pushing each store to memory ... */
	for (p = pstart; p < pend; p++) {
		*p = PPC_NOP_INSTR;
		asm volatile ("dcbst 0, %0" : : "r" (p));
	}
	asm volatile ("sync" : : : "memory");

	/* ... then invalidate the icache so the new instructions are fetched. */
	for (p = pstart; p < pend; p++)
		asm volatile ("icbi 0,%0" : : "r" (p));
	asm volatile ("sync; isync" : : : "memory");
}

void do_feature_fixups(unsigned long value, void *fixup_start, void *fixup_end)
{
	struct fixup_entry *fcur, *fend;

	fcur = fixup_start;
	fend = fixup_end;

	/* Walk every entry in the fixup table and patch it as needed. */
	for (; fcur < fend; fcur++)
		patch_feature_section(value, fcur);
}
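
/*
 * Illustrative sketch, not part of the file above: callers typically pass
 * the CPU feature word together with the bounds of the fixup table that
 * the linker script collects, along the lines of:
 *
 *	do_feature_fixups(cur_cpu_spec->cpu_features,
 *			  &__start___ftr_fixup, &__stop___ftr_fixup);
 *
 * where __start___ftr_fixup/__stop___ftr_fixup bracket the section holding
 * the struct fixup_entry records emitted by the feature-section assembler
 * macros.
 */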