@@ -34,34 +34,6 @@ extern struct task_struct * FASTCALL(__switch_to(struct task_struct *prev, struc
"2" (prev), "d" (next)); \
} while (0)

-#define _set_base(addr,base) do { unsigned long __pr; \
-__asm__ __volatile__ ("movw %%dx,%1\n\t" \
- "rorl $16,%%edx\n\t" \
- "movb %%dl,%2\n\t" \
- "movb %%dh,%3" \
- :"=&d" (__pr) \
- :"m" (*((addr)+2)), \
- "m" (*((addr)+4)), \
- "m" (*((addr)+7)), \
- "0" (base) \
- ); } while(0)
-
-#define _set_limit(addr,limit) do { unsigned long __lr; \
-__asm__ __volatile__ ("movw %%dx,%1\n\t" \
- "rorl $16,%%edx\n\t" \
- "movb %2,%%dh\n\t" \
- "andb $0xf0,%%dh\n\t" \
- "orb %%dh,%%dl\n\t" \
- "movb %%dl,%2" \
- :"=&d" (__lr) \
- :"m" (*(addr)), \
- "m" (*((addr)+6)), \
- "0" (limit) \
- ); } while(0)
-
-#define set_base(ldt,base) _set_base( ((char *)&(ldt)) , (base) )
-#define set_limit(ldt,limit) _set_limit( ((char *)&(ldt)) , ((limit)-1) )
-
/*
* Load a segment. Fall back on loading the zero
* segment if something goes wrong..
@@ -83,12 +55,6 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
".previous" \
: :"rm" (value))

-/*
- * Save a segment register away
- */
-#define savesegment(seg, value) \
- asm volatile("mov %%" #seg ",%0":"=rm" (value))
-

static inline void native_clts(void)
{
@@ -161,11 +127,6 @@ static inline void native_wbinvd(void)
asm volatile("wbinvd": : :"memory");
}

-static inline void clflush(void *__p)
-{
- asm volatile("clflush %0" : "+m" (*(char __force *)__p));
-}
-
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
@@ -190,15 +151,6 @@ static inline void clflush(void *__p)

#endif /* __KERNEL__ */

-static inline unsigned long get_limit(unsigned long segment)
-{
- unsigned long __limit;
- __asm__("lsll %1,%0"
- :"=r" (__limit):"r" (segment));
- return __limit+1;
-}
-
-#define nop() __asm__ __volatile__ ("nop")

/*
* Force strict CPU ordering.
@@ -305,15 +257,5 @@ static inline unsigned long get_limit(unsigned long segment)
* disable hlt during certain critical i/o operations
*/
#define HAVE_DISABLE_HLT
-void disable_hlt(void);
-void enable_hlt(void);
-
-extern int es7000_plat;
-void cpu_idle_wait(void);
-
-extern unsigned long arch_align_stack(unsigned long sp);
-extern void free_init_pages(char *what, unsigned long begin, unsigned long end);
-
-void default_idle(void);

#endif