|
@@ -10,6 +10,14 @@
|
|
#include <asm/asm-offsets.h>
|
|
#include <asm/asm-offsets.h>
|
|
#include <asm/regdef.h>
|
|
#include <asm/regdef.h>
|
|
|
|
|
|
|
|
+#if LONGSIZE == 4
|
|
|
|
+#define LONG_S_L swl
|
|
|
|
+#define LONG_S_R swr
|
|
|
|
+#else
|
|
|
|
+#define LONG_S_L sdl
|
|
|
|
+#define LONG_S_R sdr
|
|
|
|
+#endif
|
|
|
|
+
|
|
#define EX(insn,reg,addr,handler) \
|
|
#define EX(insn,reg,addr,handler) \
|
|
9: insn reg, addr; \
|
|
9: insn reg, addr; \
|
|
.section __ex_table,"a"; \
|
|
.section __ex_table,"a"; \
|
|
@@ -25,6 +33,7 @@
|
|
EX(LONG_S, \val, (\offset + 5 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 5 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 6 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 6 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 7 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 7 * LONGSIZE)(\dst), \fixup)
|
|
|
|
+#if LONGSIZE == 4
|
|
EX(LONG_S, \val, (\offset + 8 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 8 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 9 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 9 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 10 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 10 * LONGSIZE)(\dst), \fixup)
|
|
@@ -33,6 +42,7 @@
|
|
EX(LONG_S, \val, (\offset + 13 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 13 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 14 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 14 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 15 * LONGSIZE)(\dst), \fixup)
|
|
EX(LONG_S, \val, (\offset + 15 * LONGSIZE)(\dst), \fixup)
|
|
|
|
+#endif
|
|
.endm
|
|
.endm
|
|
|
|
|
|
/*
|
|
/*
|
|
@@ -49,9 +59,13 @@ LEAF(memset)
|
|
move v0, a0 /* result */
|
|
move v0, a0 /* result */
|
|
|
|
|
|
andi a1, 0xff /* spread fillword */
|
|
andi a1, 0xff /* spread fillword */
|
|
- sll t1, a1, 8
|
|
|
|
|
|
+ LONG_SLL t1, a1, 8
|
|
or a1, t1
|
|
or a1, t1
|
|
- sll t1, a1, 16
|
|
|
|
|
|
+ LONG_SLL t1, a1, 16
|
|
|
|
+#if LONGSIZE == 8
|
|
|
|
+ or a1, t1
|
|
|
|
+ LONG_SLL t1, a1, 32
|
|
|
|
+#endif
|
|
or a1, t1
|
|
or a1, t1
|
|
1:
|
|
1:
|
|
|
|
|
|
@@ -64,10 +78,10 @@ FEXPORT(__bzero)
|
|
PTR_SUBU t0, LONGSIZE /* alignment in bytes */
|
|
PTR_SUBU t0, LONGSIZE /* alignment in bytes */
|
|
|
|
|
|
#ifdef __MIPSEB__
|
|
#ifdef __MIPSEB__
|
|
- EX(swl, a1, (a0), first_fixup) /* make word aligned */
|
|
|
|
|
|
+ EX(LONG_S_L, a1, (a0), first_fixup) /* make word/dword aligned */
|
|
#endif
|
|
#endif
|
|
#ifdef __MIPSEL__
|
|
#ifdef __MIPSEL__
|
|
- EX(swr, a1, (a0), first_fixup) /* make word aligned */
|
|
|
|
|
|
+ EX(LONG_S_R, a1, (a0), first_fixup) /* make word/dword aligned */
|
|
#endif
|
|
#endif
|
|
PTR_SUBU a0, t0 /* long align ptr */
|
|
PTR_SUBU a0, t0 /* long align ptr */
|
|
PTR_ADDU a2, t0 /* correct size */
|
|
PTR_ADDU a2, t0 /* correct size */
|
|
@@ -75,7 +89,7 @@ FEXPORT(__bzero)
|
|
1: ori t1, a2, 0x3f /* # of full blocks */
|
|
1: ori t1, a2, 0x3f /* # of full blocks */
|
|
xori t1, 0x3f
|
|
xori t1, 0x3f
|
|
beqz t1, memset_partial /* no block to fill */
|
|
beqz t1, memset_partial /* no block to fill */
|
|
- andi t0, a2, 0x3c
|
|
|
|
|
|
+ andi t0, a2, 0x40-LONGSIZE
|
|
|
|
|
|
PTR_ADDU t1, a0 /* end address */
|
|
PTR_ADDU t1, a0 /* end address */
|
|
.set reorder
|
|
.set reorder
|
|
@@ -86,7 +100,14 @@ FEXPORT(__bzero)
|
|
|
|
|
|
memset_partial:
|
|
memset_partial:
|
|
PTR_LA t1, 2f /* where to start */
|
|
PTR_LA t1, 2f /* where to start */
|
|
|
|
+#if LONGSIZE == 4
|
|
PTR_SUBU t1, t0
|
|
PTR_SUBU t1, t0
|
|
|
|
+#else
|
|
|
|
+ .set noat
|
|
|
|
+ LONG_SRL AT, t0, 1
|
|
|
|
+ PTR_SUBU t1, AT
|
|
|
|
+	.set	at
|
|
|
|
+#endif
|
|
jr t1
|
|
jr t1
|
|
PTR_ADDU a0, t0 /* dest ptr */
|
|
PTR_ADDU a0, t0 /* dest ptr */
|
|
|
|
|
|
@@ -100,10 +121,10 @@ memset_partial:
|
|
beqz a2, 1f
|
|
beqz a2, 1f
|
|
PTR_ADDU a0, a2 /* What's left */
|
|
PTR_ADDU a0, a2 /* What's left */
|
|
#ifdef __MIPSEB__
|
|
#ifdef __MIPSEB__
|
|
- EX(swr, a1, -1(a0), last_fixup)
|
|
|
|
|
|
+ EX(LONG_S_R, a1, -1(a0), last_fixup)
|
|
#endif
|
|
#endif
|
|
#ifdef __MIPSEL__
|
|
#ifdef __MIPSEL__
|
|
- EX(swl, a1, -1(a0), last_fixup)
|
|
|
|
|
|
+ EX(LONG_S_L, a1, -1(a0), last_fixup)
|
|
#endif
|
|
#endif
|
|
1: jr ra
|
|
1: jr ra
|
|
move a2, zero
|
|
move a2, zero
|