/* memset.S */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/offset.h>
#include <asm/regdef.h>

/*
 * EX(insn, reg, addr, handler): emit one memory instruction at local
 * label 9 and record its address in the __ex_table section, so a fault
 * taken on that instruction is redirected to `handler` by the kernel's
 * exception-table lookup instead of oopsing.
 */
#define EX(insn,reg,addr,handler) \
9:	insn	reg, addr; \
	.section __ex_table,"a"; \
	PTR	9b, handler; \
	.previous
/*
 * f_fill64 dst, offset, val, fixup: fill the 64 bytes at (dst + offset)
 * with the 64-bit pattern in `val`, as eight LONG_S stores, each one
 * covered by an exception-table entry pointing at `fixup`.
 * (64-bit build: LONGSIZE is 8 — see the dsll-by-32 expansion below.)
 */
	.macro	f_fill64 dst, offset, val, fixup
	EX(LONG_S, \val, (\offset + 0 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 4 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * LONGSIZE)(\dst), \fixup)
	.endm
/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */
  34. .set noreorder
  35. .align 5
  36. LEAF(memset)
  37. beqz a1, 1f
  38. move v0, a0 /* result */
  39. andi a1, 0xff /* spread fillword */
  40. dsll t1, a1, 8
  41. or a1, t1
  42. dsll t1, a1, 16
  43. or a1, t1
  44. dsll t1, a1, 32
  45. or a1, t1
  46. 1:
  47. FEXPORT(__bzero)
  48. sltiu t0, a2, LONGSIZE /* very small region? */
  49. bnez t0, small_memset
  50. andi t0, a0, LONGMASK /* aligned? */
  51. beqz t0, 1f
  52. PTR_SUBU t0, LONGSIZE /* alignment in bytes */
  53. #ifdef __MIPSEB__
  54. EX(sdl, a1, (a0), first_fixup) /* make dword aligned */
  55. #endif
  56. #ifdef __MIPSEL__
  57. EX(sdr, a1, (a0), first_fixup) /* make dword aligned */
  58. #endif
  59. PTR_SUBU a0, t0 /* long align ptr */
  60. PTR_ADDU a2, t0 /* correct size */
  61. 1: ori t1, a2, 0x3f /* # of full blocks */
  62. xori t1, 0x3f
  63. beqz t1, memset_partial /* no block to fill */
  64. andi t0, a2, 0x38
  65. PTR_ADDU t1, a0 /* end address */
  66. .set reorder
  67. 1: PTR_ADDIU a0, 64
  68. f_fill64 a0, -64, a1, fwd_fixup
  69. bne t1, a0, 1b
  70. .set noreorder
  71. memset_partial:
  72. PTR_LA t1, 2f /* where to start */
  73. .set noat
  74. dsrl AT, t0, 1
  75. PTR_SUBU t1, AT
  76. .set noat
  77. jr t1
  78. PTR_ADDU a0, t0 /* dest ptr */
  79. .set push
  80. .set noreorder
  81. .set nomacro
  82. f_fill64 a0, -64, a1, partial_fixup /* ... but first do longs ... */
  83. 2: .set pop
  84. andi a2, LONGMASK /* At most one long to go */
  85. beqz a2, 1f
  86. PTR_ADDU a0, a2 /* What's left */
  87. #ifdef __MIPSEB__
  88. EX(sdr, a1, -1(a0), last_fixup)
  89. #endif
  90. #ifdef __MIPSEL__
  91. EX(sdl, a1, -1(a0), last_fixup)
  92. #endif
  93. 1: jr ra
  94. move a2, zero
  95. small_memset:
  96. beqz a2, 2f
  97. PTR_ADDU t1, a0, a2
  98. 1: PTR_ADDIU a0, 1 /* fill bytewise */
  99. bne t1, a0, 1b
  100. sb a1, -1(a0)
  101. 2: jr ra /* done */
  102. move a2, zero
  103. END(memset)
  104. first_fixup:
  105. jr ra
  106. nop
  107. fwd_fixup:
  108. PTR_L t0, TI_TASK($28)
  109. LONG_L t0, THREAD_BUADDR(t0)
  110. andi a2, 0x3f
  111. LONG_ADDU a2, t1
  112. jr ra
  113. LONG_SUBU a2, t0
  114. partial_fixup:
  115. PTR_L t0, TI_TASK($28)
  116. LONG_L t0, THREAD_BUADDR(t0)
  117. andi a2, LONGMASK
  118. LONG_ADDU a2, t1
  119. jr ra
  120. LONG_SUBU a2, t0
  121. last_fixup:
  122. jr ra
  123. andi v1, a2, LONGMASK