/* copy_user.S: Sparc optimized copy_from_user and copy_to_user code.
 *
 *  Copyright(C) 1995 Linus Torvalds
 *  Copyright(C) 1996 David S. Miller
 *  Copyright(C) 1996 Eddie C. Dost
 *  Copyright(C) 1996,1998 Jakub Jelinek
 *
 * derived from:
 *   e-mail between David and Eddie.
 *
 * Returns 0 if successful, otherwise the count of bytes not copied yet
 */
#include <asm/ptrace.h>
#include <asm/asmmacro.h>
#include <asm/page.h>

/* Work around cpp -rob */
#define ALLOC #alloc
#define EXECINSTR #execinstr
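/* The EX/EX2/EXO2 macros wrap a single load or store (local label 98:) with
 * a fixup stub (99:) and an __ex_table entry pairing the two, so a fault in
 * the wrapped instruction lands in the stub, which computes the count of
 * bytes not yet copied into %g3 and branches to fixupretl.  EX2 allows one
 * extra fixup instruction.  EXO2 instead points the fault at the common 97:
 * stub in .fixup, which returns the still-untouched %o2 as the count.  EXT
 * covers a whole range of instructions with one table entry; for faults in
 * such a range the trap code passes the index of the faulting instruction in
 * %g2 (see the comment above the 50:..55: handlers below).
 */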
#define EX(x,y,a,b) \
98:     x,y; \
        .section .fixup,ALLOC,EXECINSTR; \
        .align 4; \
99:     ba fixupretl; \
        a, b, %g3; \
        .section __ex_table,ALLOC; \
        .align 4; \
        .word 98b, 99b; \
        .text; \
        .align 4
#define EX2(x,y,c,d,e,a,b) \
98:     x,y; \
        .section .fixup,ALLOC,EXECINSTR; \
        .align 4; \
99:     c, d, e; \
        ba fixupretl; \
        a, b, %g3; \
        .section __ex_table,ALLOC; \
        .align 4; \
        .word 98b, 99b; \
        .text; \
        .align 4
#define EXO2(x,y) \
98:     x, y; \
        .section __ex_table,ALLOC; \
        .align 4; \
        .word 98b, 97f; \
        .text; \
        .align 4
#define EXT(start,end,handler) \
        .section __ex_table,ALLOC; \
        .align 4; \
        .word start, 0, end, handler; \
        .text; \
        .align 4
/* Please do not change the following macros unless you also change the logic
 * used in .fixup at the end of this file.
 */

/* Both these macros have to start with exactly the same insn */
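/* MOVE_BIGCHUNK copies 32 bytes per invocation: four ldd loads into register
 * pairs, written back as eight word-sized st stores (the destination is only
 * guaranteed to be word aligned on this path).
 */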
#define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
        ldd [%src + (offset) + 0x00], %t0; \
        ldd [%src + (offset) + 0x08], %t2; \
        ldd [%src + (offset) + 0x10], %t4; \
        ldd [%src + (offset) + 0x18], %t6; \
        st %t0, [%dst + (offset) + 0x00]; \
        st %t1, [%dst + (offset) + 0x04]; \
        st %t2, [%dst + (offset) + 0x08]; \
        st %t3, [%dst + (offset) + 0x0c]; \
        st %t4, [%dst + (offset) + 0x10]; \
        st %t5, [%dst + (offset) + 0x14]; \
        st %t6, [%dst + (offset) + 0x18]; \
        st %t7, [%dst + (offset) + 0x1c];
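/* MOVE_BIGALIGNCHUNK also copies 32 bytes, but stores with std, so the
 * destination must be doubleword aligned as well.
 */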
#define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
        ldd [%src + (offset) + 0x00], %t0; \
        ldd [%src + (offset) + 0x08], %t2; \
        ldd [%src + (offset) + 0x10], %t4; \
        ldd [%src + (offset) + 0x18], %t6; \
        std %t0, [%dst + (offset) + 0x00]; \
        std %t2, [%dst + (offset) + 0x08]; \
        std %t4, [%dst + (offset) + 0x10]; \
        std %t6, [%dst + (offset) + 0x18];
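/* MOVE_LASTCHUNK copies 16 bytes; it uses negative offsets because both
 * pointers are advanced past the chunked area before the computed jump into
 * copy_user_table.
 */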
#define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \
        ldd [%src - (offset) - 0x10], %t0; \
        ldd [%src - (offset) - 0x08], %t2; \
        st %t0, [%dst - (offset) - 0x10]; \
        st %t1, [%dst - (offset) - 0x0c]; \
        st %t2, [%dst - (offset) - 0x08]; \
        st %t3, [%dst - (offset) - 0x04];
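/* MOVE_HALFCHUNK copies 8 bytes as four halfwords; it is used when source
 * and destination alignments differ by exactly 2.
 */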
#define MOVE_HALFCHUNK(src, dst, offset, t0, t1, t2, t3) \
        lduh [%src + (offset) + 0x00], %t0; \
        lduh [%src + (offset) + 0x02], %t1; \
        lduh [%src + (offset) + 0x04], %t2; \
        lduh [%src + (offset) + 0x06], %t3; \
        sth %t0, [%dst + (offset) + 0x00]; \
        sth %t1, [%dst + (offset) + 0x02]; \
        sth %t2, [%dst + (offset) + 0x04]; \
        sth %t3, [%dst + (offset) + 0x06];
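/* MOVE_SHORTCHUNK copies 2 bytes, byte by byte, again with negative offsets. */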
#define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \
        ldub [%src - (offset) - 0x02], %t0; \
        ldub [%src - (offset) - 0x01], %t1; \
        stb %t0, [%dst - (offset) - 0x02]; \
        stb %t1, [%dst - (offset) - 0x01];
        .text
        .align 4

        .globl __copy_user_begin
__copy_user_begin:

        .globl __copy_user
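/* dword_align: reached from __copy_user when source and destination agree
 * modulo 4 and the length is above 15, but the source is not yet word
 * aligned.  Copy the leading 1-3 bytes so %o1 becomes word aligned, then
 * rejoin the main path at label 3 below.
 */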
dword_align:
        andcc %o1, 1, %g0
        be 4f
        andcc %o1, 2, %g0

        EXO2(ldub [%o1], %g2)
        add %o1, 1, %o1
        EXO2(stb %g2, [%o0])
        sub %o2, 1, %o2
        bne 3f
        add %o0, 1, %o0

        EXO2(lduh [%o1], %g2)
        add %o1, 2, %o1
        EXO2(sth %g2, [%o0])
        sub %o2, 2, %o2
        b 3f
        add %o0, 2, %o0
4:
        EXO2(lduh [%o1], %g2)
        add %o1, 2, %o1
        EXO2(sth %g2, [%o0])
        sub %o2, 2, %o2
        b 3f
        add %o0, 2, %o0
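/* Main entry: %o0 = destination, %o1 = source, %o2 = length.  If source and
 * destination are not equally aligned modulo 4 the word/doubleword loops
 * cannot be used, so we take cannot_optimize; lengths of 15 bytes or less go
 * to short_aligned_end.  Otherwise the source is word aligned (dword_align
 * above), optionally bumped to a doubleword boundary with one word copy,
 * then moved in 128-byte blocks, then in 16-byte chunks via a computed jump
 * into copy_user_table, and the last 0-7 bytes in copy_user_last7.
 */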
__copy_user:    /* %o0=dst %o1=src %o2=len */
        xor %o0, %o1, %o4
1:
        andcc %o4, 3, %o5
2:
        bne cannot_optimize
        cmp %o2, 15

        bleu short_aligned_end
        andcc %o1, 3, %g0

        bne dword_align
3:
        andcc %o1, 4, %g0

        be 2f
        mov %o2, %g1

        EXO2(ld [%o1], %o4)
        sub %g1, 4, %g1
        EXO2(st %o4, [%o0])
        add %o1, 4, %o1
        add %o0, 4, %o0
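/* %g1 holds the remaining length; %g7 = %g1 & ~0x7f is the part handled by
 * the unrolled 128-byte loop.  If the destination is doubleword aligned we
 * branch into ldd_std at its second instruction: the branch delay slot below
 * already executes the first ldd at label 5, which is why MOVE_BIGCHUNK and
 * MOVE_BIGALIGNCHUNK must start with exactly the same insn.
 */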
2:
        andcc %g1, 0xffffff80, %g7
        be 3f
        andcc %o0, 4, %g0

        be ldd_std + 4
5:
        MOVE_BIGCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
        MOVE_BIGCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
        MOVE_BIGCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
        MOVE_BIGCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)
80:
        EXT(5b, 80b, 50f)
        subcc %g7, 128, %g7
        add %o1, 128, %o1
        bne 5b
        add %o0, 128, %o0
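/* Copy the remaining 16-byte chunks (%g1 & 0x70 bytes) by jumping into
 * copy_user_table: each MOVE_LASTCHUNK expands to 24 bytes of code for 16
 * bytes of data, hence the offset of %g7 + %g7/2 back from the end of the
 * table.  The pointers are advanced first because the table entries use
 * negative offsets.
 */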
3:
        andcc %g1, 0x70, %g7
        be copy_user_table_end
        andcc %g1, 8, %g0

        sethi %hi(copy_user_table_end), %o5
        srl %g7, 1, %o4
        add %g7, %o4, %o4
        add %o1, %g7, %o1
        sub %o5, %o4, %o5
        jmpl %o5 + %lo(copy_user_table_end), %g0
        add %o0, %g7, %o0
copy_user_table:
        MOVE_LASTCHUNK(o1, o0, 0x60, g2, g3, g4, g5)
        MOVE_LASTCHUNK(o1, o0, 0x50, g2, g3, g4, g5)
        MOVE_LASTCHUNK(o1, o0, 0x40, g2, g3, g4, g5)
        MOVE_LASTCHUNK(o1, o0, 0x30, g2, g3, g4, g5)
        MOVE_LASTCHUNK(o1, o0, 0x20, g2, g3, g4, g5)
        MOVE_LASTCHUNK(o1, o0, 0x10, g2, g3, g4, g5)
        MOVE_LASTCHUNK(o1, o0, 0x00, g2, g3, g4, g5)
copy_user_table_end:
        EXT(copy_user_table, copy_user_table_end, 51f)
        be copy_user_last7
        andcc %g1, 4, %g0

        EX(ldd [%o1], %g2, and %g1, 0xf)
        add %o0, 8, %o0
        add %o1, 8, %o1
        EX(st %g2, [%o0 - 0x08], and %g1, 0xf)
        EX2(st %g3, [%o0 - 0x04], and %g1, 0xf, %g1, sub %g1, 4)
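/* Finish the last 0-7 bytes: bit 2 of %g1 means one word, bit 1 a halfword,
 * bit 0 a single byte.  Return 0 on success.
 */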
copy_user_last7:
        be 1f
        andcc %g1, 2, %g0

        EX(ld [%o1], %g2, and %g1, 7)
        add %o1, 4, %o1
        EX(st %g2, [%o0], and %g1, 7)
        add %o0, 4, %o0
1:
        be 1f
        andcc %g1, 1, %g0

        EX(lduh [%o1], %g2, and %g1, 3)
        add %o1, 2, %o1
        EX(sth %g2, [%o0], and %g1, 3)
        add %o0, 2, %o0
1:
        be 1f
        nop

        EX(ldub [%o1], %g2, add %g0, 1)
        EX(stb %g2, [%o0], add %g0, 1)
1:
        retl
        clr %o0
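/* Same 128-byte loop as above, but both pointers are doubleword aligned so
 * the stores can use std.  The main path branches to ldd_std + 4 with the
 * first ldd already executed in its branch delay slot.
 */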
ldd_std:
        MOVE_BIGALIGNCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
        MOVE_BIGALIGNCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
        MOVE_BIGALIGNCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
        MOVE_BIGALIGNCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)
81:
        EXT(ldd_std, 81b, 52f)
        subcc %g7, 128, %g7
        add %o1, 128, %o1
        bne ldd_std
        add %o0, 128, %o0

        andcc %g1, 0x70, %g7
        be copy_user_table_end
        andcc %g1, 8, %g0

        sethi %hi(copy_user_table_end), %o5
        srl %g7, 1, %o4
        add %g7, %o4, %o4
        add %o1, %g7, %o1
        sub %o5, %o4, %o5
        jmpl %o5 + %lo(copy_user_table_end), %g0
        add %o0, %g7, %o0
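/* Source and destination disagree modulo 4.  Lengths of 15 bytes or less go
 * straight to short_end.  If (dst ^ src) & 3 == 2 the two sides share byte
 * parity, so after an optional leading byte we can still move halfwords;
 * otherwise everything goes byte by byte through byte_chunk.
 */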
cannot_optimize:
        bleu short_end
        cmp %o5, 2

        bne byte_chunk
        and %o2, 0xfffffff0, %o3

        andcc %o1, 1, %g0
        be 10f
        nop

        EXO2(ldub [%o1], %g2)
        add %o1, 1, %o1
        EXO2(stb %g2, [%o0])
        sub %o2, 1, %o2
        andcc %o2, 0xfffffff0, %o3
        be short_end
        add %o0, 1, %o0
10:
        MOVE_HALFCHUNK(o1, o0, 0x00, g2, g3, g4, g5)
        MOVE_HALFCHUNK(o1, o0, 0x08, g2, g3, g4, g5)
82:
        EXT(10b, 82b, 53f)
        subcc %o3, 0x10, %o3
        add %o1, 0x10, %o1
        bne 10b
        add %o0, 0x10, %o0

        b 2f
        and %o2, 0xe, %o3
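/* Completely unaligned case: move 16 bytes per iteration with eight 2-byte
 * MOVE_SHORTCHUNKs, then fall into short_end for the tail.
 */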
byte_chunk:
        MOVE_SHORTCHUNK(o1, o0, -0x02, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x04, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x06, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x08, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x0a, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x0c, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x0e, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, -0x10, g2, g3)
83:
        EXT(byte_chunk, 83b, 54f)
        subcc %o3, 0x10, %o3
        add %o1, 0x10, %o1
        bne byte_chunk
        add %o0, 0x10, %o0
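/* Copy the remaining tail: a computed jump into the MOVE_SHORTCHUNK table
 * below handles %o2 & 0xe bytes two at a time (each table entry is 16 bytes
 * of code, hence the shift by 3), and one final byte is copied if %o2 is
 * odd.
 */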
short_end:
        and %o2, 0xe, %o3
2:
        sethi %hi(short_table_end), %o5
        sll %o3, 3, %o4
        add %o0, %o3, %o0
        sub %o5, %o4, %o5
        add %o1, %o3, %o1
        jmpl %o5 + %lo(short_table_end), %g0
        andcc %o2, 1, %g0
84:
        MOVE_SHORTCHUNK(o1, o0, 0x0c, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, 0x0a, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, 0x08, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, 0x06, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, 0x04, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, 0x02, g2, g3)
        MOVE_SHORTCHUNK(o1, o0, 0x00, g2, g3)
short_table_end:
        EXT(84b, short_table_end, 55f)
        be 1f
        nop

        EX(ldub [%o1], %g2, add %g0, 1)
        EX(stb %g2, [%o0], add %g0, 1)
1:
        retl
        clr %o0
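/* Source and destination are equally aligned but the length is at most 15
 * bytes.  If the source is not word aligned fall back to short_end;
 * otherwise move an 8-byte piece when bit 3 of the length is set and let
 * copy_user_last7 finish the remaining 0-7 bytes.
 */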
short_aligned_end:
        bne short_end
        andcc %o2, 8, %g0

        be 1f
        andcc %o2, 4, %g0

        EXO2(ld [%o1 + 0x00], %g2)
        EXO2(ld [%o1 + 0x04], %g3)
        add %o1, 8, %o1
        EXO2(st %g2, [%o0 + 0x00])
        EX(st %g3, [%o0 + 0x04], sub %o2, 4)
        add %o0, 8, %o0
1:
        b copy_user_last7
        mov %o2, %g1
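/* Fixup code.  Each numbered handler below computes the number of bytes that
 * had not been copied when the fault hit and leaves it in %g3.  fixupretl
 * returns that count in %o0; in the copy_from_user case (kernel destination,
 * user source) it first clears the remaining %g3 bytes of the kernel buffer
 * with __bzero so no stale data is exposed to the caller.
 */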
        .section .fixup,#alloc,#execinstr
        .align 4
97:
        mov %o2, %g3
fixupretl:
        sethi %hi(PAGE_OFFSET), %g1
        cmp %o0, %g1
        blu 1f
        cmp %o1, %g1

        bgeu 1f
        nop

        save %sp, -64, %sp
        mov %i0, %o0
        call __bzero
        mov %g3, %o1
        restore
1:      retl
        mov %g3, %o0
/* The exception routine sets %g2 to (broken_insn - first_insn) >> 2 */
50:
/* This magic counts how many bytes are left when a fault happens in
 * MOVE_BIGCHUNK.  It is derived from the amounts that ldd reads, st stores,
 * etc.
 * x = g2 % 12;
 * g3 = g1 + g7 - ((g2 / 12) * 32 + (x < 4) ? 0 : (x - 4) * 4);
 * o0 += (g2 / 12) * 32;
 */
        cmp %g2, 12
        add %o0, %g7, %o0
        bcs 1f
        cmp %g2, 24

        bcs 2f
        cmp %g2, 36

        bcs 3f
        nop

        sub %g2, 12, %g2
        sub %g7, 32, %g7
3:      sub %g2, 12, %g2
        sub %g7, 32, %g7
2:      sub %g2, 12, %g2
        sub %g7, 32, %g7
1:      cmp %g2, 4
        bcs,a 60f
        clr %g2

        sub %g2, 4, %g2
        sll %g2, 2, %g2
60:     and %g1, 0x7f, %g3
        sub %o0, %g7, %o0
        add %g3, %g7, %g3
        ba fixupretl
        sub %g3, %g2, %g3
51:
/* i = 41 - g2; j = i % 6;
 * g3 = (g1 & 15) + (i / 6) * 16 + (j < 4) ? (j + 1) * 4 : 16;
 * o0 -= (i / 6) * 16 + 16;
 */
        neg %g2
        and %g1, 0xf, %g1
        add %g2, 41, %g2
        add %o0, %g1, %o0
1:      cmp %g2, 6
        bcs,a 2f
        cmp %g2, 4

        add %g1, 16, %g1
        b 1b
        sub %g2, 6, %g2
2:      bcc,a 2f
        mov 16, %g2

        inc %g2
        sll %g2, 2, %g2
2:      add %g1, %g2, %g3
        ba fixupretl
        sub %o0, %g3, %o0
52:
/* g3 = g1 + g7 - (g2 / 8) * 32 + (g2 & 4) ? (g2 & 3) * 8 : 0;
 * o0 += (g2 / 8) * 32
 */
        andn %g2, 7, %g4
        add %o0, %g7, %o0
        andcc %g2, 4, %g0
        and %g2, 3, %g2
        sll %g4, 2, %g4
        sll %g2, 3, %g2
        bne 60b
        sub %g7, %g4, %g7

        ba 60b
        clr %g2
53:
/* g3 = o3 + (o2 & 15) - (g2 & 8) - (g2 & 4) ? (g2 & 3) * 2 : 0;
 * o0 += (g2 & 8)
 */
        and %g2, 3, %g4
        andcc %g2, 4, %g0
        and %g2, 8, %g2
        sll %g4, 1, %g4
        be 1f
        add %o0, %g2, %o0

        add %g2, %g4, %g2
1:      and %o2, 0xf, %g3
        add %g3, %o3, %g3
        ba fixupretl
        sub %g3, %g2, %g3
54:
/* g3 = o3 + (o2 & 15) - (g2 / 4) * 2 - (g2 & 2) ? (g2 & 1) : 0;
 * o0 += (g2 / 4) * 2
 */
        srl %g2, 2, %o4
        and %g2, 1, %o5
        srl %g2, 1, %g2
        add %o4, %o4, %o4
        and %o5, %g2, %o5
        and %o2, 0xf, %o2
        add %o0, %o4, %o0
        sub %o3, %o5, %o3
        sub %o2, %o4, %o2
        ba fixupretl
        add %o2, %o3, %g3
55:
/* i = 27 - g2;
 * g3 = (o2 & 1) + i / 4 * 2 + !(i & 3);
 * o0 -= i / 4 * 2 + 1
 */
        neg %g2
        and %o2, 1, %o2
        add %g2, 27, %g2
        srl %g2, 2, %o5
        andcc %g2, 3, %g0
        mov 1, %g2
        add %o5, %o5, %o5
        be,a 1f
        clr %g2
1:      add %g2, %o5, %g3
        sub %o0, %g3, %o0
        ba fixupretl
        add %g3, %o2, %g3

        .globl __copy_user_end
__copy_user_end: