@@ -14,7 +14,7 @@
 
 #if CONFIG_XILINX_MICROBLAZE0_USE_MSR_INSTR
 
-static inline unsigned long arch_local_irq_save(void)
+static inline notrace unsigned long arch_local_irq_save(void)
 {
 	unsigned long flags;
 	asm volatile("	msrclr %0, %1	\n"
@@ -25,7 +25,7 @@ static inline unsigned long arch_local_irq_save(void)
 	return flags;
 }
 
-static inline void arch_local_irq_disable(void)
+static inline notrace void arch_local_irq_disable(void)
 {
 	/* this uses r0 without declaring it - is that correct? */
 	asm volatile("	msrclr r0, %0	\n"
@@ -35,7 +35,7 @@ static inline void arch_local_irq_disable(void)
 		     : "memory");
 }
 
-static inline void arch_local_irq_enable(void)
+static inline notrace void arch_local_irq_enable(void)
 {
 	/* this uses r0 without declaring it - is that correct? */
 	asm volatile("	msrset	r0, %0	\n"
@@ -47,7 +47,7 @@ static inline void arch_local_irq_enable(void)
 
 #else /* !CONFIG_XILINX_MICROBLAZE0_USE_MSR_INSTR */
 
-static inline unsigned long arch_local_irq_save(void)
+static inline notrace unsigned long arch_local_irq_save(void)
 {
 	unsigned long flags, tmp;
 	asm volatile ("	mfs	%0, rmsr	\n"
@@ -61,7 +61,7 @@ static inline unsigned long arch_local_irq_save(void)
 	return flags;
 }
 
-static inline void arch_local_irq_disable(void)
+static inline notrace void arch_local_irq_disable(void)
 {
 	unsigned long tmp;
 	asm volatile("	mfs	%0, rmsr	\n"
@@ -74,7 +74,7 @@ static inline void arch_local_irq_disable(void)
 		     : "memory");
 }
 
-static inline void arch_local_irq_enable(void)
+static inline notrace void arch_local_irq_enable(void)
 {
 	unsigned long tmp;
 	asm volatile("	mfs	%0, rmsr	\n"
@@ -89,7 +89,7 @@ static inline void arch_local_irq_enable(void)
 
 #endif /* CONFIG_XILINX_MICROBLAZE0_USE_MSR_INSTR */
 
-static inline unsigned long arch_local_save_flags(void)
+static inline notrace unsigned long arch_local_save_flags(void)
 {
 	unsigned long flags;
 	asm volatile("	mfs	%0, rmsr	\n"
@@ -100,7 +100,7 @@ static inline unsigned long arch_local_save_flags(void)
 	return flags;
 }
 
-static inline void arch_local_irq_restore(unsigned long flags)
+static inline notrace void arch_local_irq_restore(unsigned long flags)
 {
 	asm volatile("	mts	rmsr, %0	\n"
 		     "	nop	\n"
@@ -109,12 +109,12 @@ static inline void arch_local_irq_restore(unsigned long flags)
 		     : "memory");
 }
 
-static inline bool arch_irqs_disabled_flags(unsigned long flags)
+static inline notrace bool arch_irqs_disabled_flags(unsigned long flags)
 {
 	return (flags & MSR_IE) == 0;
 }
 
-static inline bool arch_irqs_disabled(void)
+static inline notrace bool arch_irqs_disabled(void)
 {
 	return arch_irqs_disabled_flags(arch_local_save_flags());
 }
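
Not part of the patch itself: a minimal, self-contained user-space sketch of what the added annotation does. In the kernel, notrace is defined in include/linux/compiler.h as __attribute__((no_instrument_function)), so marking these IRQ-flag helpers keeps compiler-inserted entry/exit hooks (the mcount calls that ftrace builds on) out of them, presumably so the tracer cannot recurse through the very primitives it relies on. The macro, function names, and values below are illustrative only and do not come from the patch.

#include <stdio.h>

/* Local stand-in for the kernel's notrace macro (include/linux/compiler.h). */
#define notrace __attribute__((no_instrument_function))

/* Marked notrace: no entry/exit instrumentation is emitted for this
 * function even when the file is built with -pg or -finstrument-functions. */
static notrace unsigned long fake_local_save_flags(void)
{
	return 0x2;	/* pretend MSR value with the interrupt-enable bit set */
}

/* Ordinary function: receives the instrumentation hook under -pg. */
static unsigned long traced_helper(void)
{
	return fake_local_save_flags();
}

int main(void)
{
	printf("flags = %#lx\n", traced_helper());
	return 0;
}

Building with something like "gcc -pg -S example.c" and reading the generated assembly should show the profiling call in traced_helper() but not in fake_local_save_flags(), which is the effect the patch wants for arch_local_irq_save() and friends.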