@@ -354,11 +354,6 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 #define local_irq_disable() __asm__ __volatile__("cli": : :"memory")
 #define local_irq_enable() __asm__ __volatile__("sti": : :"memory")
 
-/* used in the idle loop; sti takes one instruction cycle to complete */
-#define safe_halt() __asm__ __volatile__("sti; hlt": : :"memory")
-/* used when interrupts are already enabled or to shutdown the processor */
-#define halt() __asm__ __volatile__("hlt": : :"memory")
-
 #define irqs_disabled() \
 ({ \
 	unsigned long flags; \
@@ -370,6 +365,11 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 #define local_irq_save(x) do { warn_if_not_ulong(x); __asm__ __volatile__("# local_irq_save \n\t pushfq ; popq %0 ; cli":"=g" (x): /* no input */ :"memory"); } while (0)
 #endif
 
+/* used in the idle loop; sti takes one instruction cycle to complete */
+#define safe_halt() __asm__ __volatile__("sti; hlt": : :"memory")
+/* used when interrupts are already enabled or to shutdown the processor */
+#define halt() __asm__ __volatile__("hlt": : :"memory")
+
 void cpu_idle_wait(void);
 
 extern unsigned long arch_align_stack(unsigned long sp);
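
For reference, here is a minimal sketch (not part of the patch) of the usage pattern
the first comment describes, assuming kernel context and the usual need_resched()/
schedule() helpers: the idle loop disables interrupts, re-checks for pending work,
and only then executes "sti; hlt". Because the effect of sti is delayed until after
the next instruction, hlt starts before any interrupt can be delivered, so a wakeup
interrupt arriving in that window still terminates the hlt instead of being lost
between the check and the halt.

/*
 * Hypothetical illustration only: cli/sti/hlt are privileged, so this is
 * meaningful only inside the kernel; need_resched() and schedule() are the
 * usual kernel helpers, assumed from context, and idle_loop_sketch() is a
 * made-up name.
 */
static void idle_loop_sketch(void)
{
	for (;;) {
		local_irq_disable();		/* cli: close the race window */
		if (need_resched()) {		/* work pending, don't sleep */
			local_irq_enable();	/* sti */
			schedule();
			continue;
		}
		/*
		 * sti takes effect only after the following instruction, so
		 * no interrupt can be taken between re-enabling interrupts
		 * and hlt; any wakeup interrupt breaks us out of the halt.
		 */
		safe_halt();			/* sti; hlt */
	}
}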