irqflags_32.h

/*
 * include/asm-i386/irqflags.h
 *
 * IRQ flags handling
 *
 * This file gets included from lowlevel asm headers too, to provide
 * wrapped versions of the local_irq_*() APIs, based on the
 * raw_local_irq_*() functions from the lowlevel headers.
 */
#ifndef _ASM_IRQFLAGS_H
#define _ASM_IRQFLAGS_H
#include <asm/processor-flags.h>

#ifndef __ASSEMBLY__
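
/*
 * Read the current EFLAGS register: pushfl pushes EFLAGS onto the
 * stack, popl moves it into the output operand.
 */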
static inline unsigned long native_save_fl(void)
{
        unsigned long f;
        asm volatile("pushfl ; popl %0":"=g" (f): /* no input */);
        return f;
}
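
/*
 * Write a previously saved value back into EFLAGS. The "memory"
 * clobber acts as a compiler barrier, and "cc" tells the compiler
 * that popfl rewrites the condition flags.
 */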
static inline void native_restore_fl(unsigned long f)
{
        asm volatile("pushl %0 ; popfl": /* no output */
                     :"g" (f)
                     :"memory", "cc");
}

static inline void native_irq_disable(void)
{
        asm volatile("cli": : :"memory");
}

static inline void native_irq_enable(void)
{
        asm volatile("sti": : :"memory");
}
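
/*
 * "sti; hlt": sti enables interrupts only after the following
 * instruction has executed, so an interrupt cannot sneak in between
 * the two and leave the CPU halted with nothing left to wake it.
 */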
static inline void native_safe_halt(void)
{
        asm volatile("sti; hlt": : :"memory");
}

static inline void native_halt(void)
{
        asm volatile("hlt": : :"memory");
}
#endif /* __ASSEMBLY__ */
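
/*
 * With CONFIG_PARAVIRT the raw_local_irq_*() operations come from
 * asm/paravirt.h, so a hypervisor can hook them; otherwise they map
 * directly onto the native_*() helpers above.
 */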
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#ifndef __ASSEMBLY__

static inline unsigned long __raw_local_save_flags(void)
{
        return native_save_fl();
}

static inline void raw_local_irq_restore(unsigned long flags)
{
        native_restore_fl(flags);
}

static inline void raw_local_irq_disable(void)
{
        native_irq_disable();
}

static inline void raw_local_irq_enable(void)
{
        native_irq_enable();
}

/*
 * Used in the idle loop; sti takes one instruction cycle
 * to complete:
 */
static inline void raw_safe_halt(void)
{
        native_safe_halt();
}

/*
 * Used when interrupts are already enabled or to
 * shutdown the processor:
 */
static inline void halt(void)
{
        native_halt();
}

/*
 * For spinlocks, etc:
 */
static inline unsigned long __raw_local_irq_save(void)
{
        unsigned long flags = __raw_local_save_flags();

        raw_local_irq_disable();

        return flags;
}
#else
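/* Assembler (__ASSEMBLY__) versions for kernel entry code: plain instructions. */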
#define DISABLE_INTERRUPTS(clobbers)    cli
#define ENABLE_INTERRUPTS(clobbers)     sti
#define ENABLE_INTERRUPTS_SYSEXIT       sti; sysexit
#define INTERRUPT_RETURN                iret
#define GET_CR0_INTO_EAX                movl %cr0, %eax
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_PARAVIRT */
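
/* Definitions shared by the native and CONFIG_PARAVIRT cases: */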
#ifndef __ASSEMBLY__
#define raw_local_save_flags(flags) \
        do { (flags) = __raw_local_save_flags(); } while (0)

#define raw_local_irq_save(flags) \
        do { (flags) = __raw_local_irq_save(); } while (0)
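
/*
 * X86_EFLAGS_IF (from <asm/processor-flags.h>) is the interrupt-enable
 * bit in EFLAGS; interrupts are disabled when it is clear.
 */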
static inline int raw_irqs_disabled_flags(unsigned long flags)
{
        return !(flags & X86_EFLAGS_IF);
}

static inline int raw_irqs_disabled(void)
{
        unsigned long flags = __raw_local_save_flags();

        return raw_irqs_disabled_flags(flags);
}
#endif /* __ASSEMBLY__ */

/*
 * Do the CPU's IRQ-state tracing from assembly code. We call a
 * C function, so save all the C-clobbered registers:
 */
#ifdef CONFIG_TRACE_IRQFLAGS

# define TRACE_IRQS_ON                          \
        pushl %eax;                             \
        pushl %ecx;                             \
        pushl %edx;                             \
        call trace_hardirqs_on;                 \
        popl %edx;                              \
        popl %ecx;                              \
        popl %eax;

# define TRACE_IRQS_OFF                         \
        pushl %eax;                             \
        pushl %ecx;                             \
        pushl %edx;                             \
        call trace_hardirqs_off;                \
        popl %edx;                              \
        popl %ecx;                              \
        popl %eax;

#else
# define TRACE_IRQS_ON
# define TRACE_IRQS_OFF
#endif

#endif
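
A minimal usage sketch of the save/disable/restore pattern these
primitives implement; kernel code would normally go through the
local_irq_save()/local_irq_restore() wrappers built on top of the
raw_* functions rather than call them directly. The bump_counter()
function and the counter variable are made up for illustration.

static unsigned long counter;

static void bump_counter(void)
{
        unsigned long flags;

        raw_local_irq_save(flags);      /* save EFLAGS, then cli */
        counter++;                      /* interrupts are off here */
        raw_local_irq_restore(flags);   /* restore the saved IF state */
}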