/* arch/ppc/lib/string.S */
/*
 * String handling functions for PowerPC.
 *
 * Copyright (C) 1996 Paul Mackerras.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
  11. #include <asm/processor.h>
  12. #include <asm/cache.h>
  13. #include <asm/errno.h>
  14. #include <asm/ppc_asm.h>
  15. #define COPY_16_BYTES \
  16. lwz r7,4(r4); \
  17. lwz r8,8(r4); \
  18. lwz r9,12(r4); \
  19. lwzu r10,16(r4); \
  20. stw r7,4(r6); \
  21. stw r8,8(r6); \
  22. stw r9,12(r6); \
  23. stwu r10,16(r6)
  24. #define COPY_16_BYTES_WITHEX(n) \
  25. 8 ## n ## 0: \
  26. lwz r7,4(r4); \
  27. 8 ## n ## 1: \
  28. lwz r8,8(r4); \
  29. 8 ## n ## 2: \
  30. lwz r9,12(r4); \
  31. 8 ## n ## 3: \
  32. lwzu r10,16(r4); \
  33. 8 ## n ## 4: \
  34. stw r7,4(r6); \
  35. 8 ## n ## 5: \
  36. stw r8,8(r6); \
  37. 8 ## n ## 6: \
  38. stw r9,12(r6); \
  39. 8 ## n ## 7: \
  40. stwu r10,16(r6)
  41. #define COPY_16_BYTES_EXCODE(n) \
  42. 9 ## n ## 0: \
  43. addi r5,r5,-(16 * n); \
  44. b 104f; \
  45. 9 ## n ## 1: \
  46. addi r5,r5,-(16 * n); \
  47. b 105f; \
  48. .section __ex_table,"a"; \
  49. .align 2; \
  50. .long 8 ## n ## 0b,9 ## n ## 0b; \
  51. .long 8 ## n ## 1b,9 ## n ## 0b; \
  52. .long 8 ## n ## 2b,9 ## n ## 0b; \
  53. .long 8 ## n ## 3b,9 ## n ## 0b; \
  54. .long 8 ## n ## 4b,9 ## n ## 1b; \
  55. .long 8 ## n ## 5b,9 ## n ## 1b; \
  56. .long 8 ## n ## 6b,9 ## n ## 1b; \
  57. .long 8 ## n ## 7b,9 ## n ## 1b; \
  58. .text
  59. .text
  60. .stabs "arch/ppc/lib/",N_SO,0,0,0f
  61. .stabs "string.S",N_SO,0,0,0f
  62. CACHELINE_BYTES = L1_CACHE_BYTES
  63. LG_CACHELINE_BYTES = L1_CACHE_SHIFT
  64. CACHELINE_MASK = (L1_CACHE_BYTES-1)
  65. _GLOBAL(strcpy)
  66. addi r5,r3,-1
  67. addi r4,r4,-1
  68. 1: lbzu r0,1(r4)
  69. cmpwi 0,r0,0
  70. stbu r0,1(r5)
  71. bne 1b
  72. blr
  73. /* This clears out any unused part of the destination buffer,
  74. just as the libc version does. -- paulus */
  75. _GLOBAL(strncpy)
  76. cmpwi 0,r5,0
  77. beqlr
  78. mtctr r5
  79. addi r6,r3,-1
  80. addi r4,r4,-1
  81. 1: lbzu r0,1(r4)
  82. cmpwi 0,r0,0
  83. stbu r0,1(r6)
  84. bdnzf 2,1b /* dec ctr, branch if ctr != 0 && !cr0.eq */
  85. bnelr /* if we didn't hit a null char, we're done */
  86. mfctr r5
  87. cmpwi 0,r5,0 /* any space left in destination buffer? */
  88. beqlr /* we know r0 == 0 here */
  89. 2: stbu r0,1(r6) /* clear it out if so */
  90. bdnz 2b
  91. blr
  92. _GLOBAL(strcat)
  93. addi r5,r3,-1
  94. addi r4,r4,-1
  95. 1: lbzu r0,1(r5)
  96. cmpwi 0,r0,0
  97. bne 1b
  98. addi r5,r5,-1
  99. 1: lbzu r0,1(r4)
  100. cmpwi 0,r0,0
  101. stbu r0,1(r5)
  102. bne 1b
  103. blr
  104. _GLOBAL(strcmp)
  105. addi r5,r3,-1
  106. addi r4,r4,-1
  107. 1: lbzu r3,1(r5)
  108. cmpwi 1,r3,0
  109. lbzu r0,1(r4)
  110. subf. r3,r0,r3
  111. beqlr 1
  112. beq 1b
  113. blr
  114. _GLOBAL(strncmp)
  115. PPC_LCMPI r5,0
  116. beqlr
  117. mtctr r5
  118. addi r5,r3,-1
  119. addi r4,r4,-1
  120. 1: lbzu r3,1(r5)
  121. cmpwi 1,r3,0
  122. lbzu r0,1(r4)
  123. subf. r3,r0,r3
  124. beqlr 1
  125. bdnzt eq,1b
  126. blr
  127. _GLOBAL(strlen)
  128. addi r4,r3,-1
  129. 1: lbzu r0,1(r4)
  130. cmpwi 0,r0,0
  131. bne 1b
  132. subf r3,r3,r4
  133. blr
  134. /*
  135. * Use dcbz on the complete cache lines in the destination
  136. * to set them to zero. This requires that the destination
  137. * area is cacheable. -- paulus
  138. */
  139. _GLOBAL(cacheable_memzero)
  140. mr r5,r4
  141. li r4,0
  142. addi r6,r3,-4
  143. cmplwi 0,r5,4
  144. blt 7f
  145. stwu r4,4(r6)
  146. beqlr
  147. andi. r0,r6,3
  148. add r5,r0,r5
  149. subf r6,r0,r6
  150. clrlwi r7,r6,32-LG_CACHELINE_BYTES
  151. add r8,r7,r5
  152. srwi r9,r8,LG_CACHELINE_BYTES
  153. addic. r9,r9,-1 /* total number of complete cachelines */
  154. ble 2f
  155. xori r0,r7,CACHELINE_MASK & ~3
  156. srwi. r0,r0,2
  157. beq 3f
  158. mtctr r0
  159. 4: stwu r4,4(r6)
  160. bdnz 4b
  161. 3: mtctr r9
  162. li r7,4
  163. #if !defined(CONFIG_8xx)
  164. 10: dcbz r7,r6
  165. #else
  166. 10: stw r4, 4(r6)
  167. stw r4, 8(r6)
  168. stw r4, 12(r6)
  169. stw r4, 16(r6)
  170. #if CACHE_LINE_SIZE >= 32
  171. stw r4, 20(r6)
  172. stw r4, 24(r6)
  173. stw r4, 28(r6)
  174. stw r4, 32(r6)
  175. #endif /* CACHE_LINE_SIZE */
  176. #endif
  177. addi r6,r6,CACHELINE_BYTES
  178. bdnz 10b
  179. clrlwi r5,r8,32-LG_CACHELINE_BYTES
  180. addi r5,r5,4
  181. 2: srwi r0,r5,2
  182. mtctr r0
  183. bdz 6f
  184. 1: stwu r4,4(r6)
  185. bdnz 1b
  186. 6: andi. r5,r5,3
  187. 7: cmpwi 0,r5,0
  188. beqlr
  189. mtctr r5
  190. addi r6,r6,3
  191. 8: stbu r4,1(r6)
  192. bdnz 8b
  193. blr
  194. _GLOBAL(memset)
  195. rlwimi r4,r4,8,16,23
  196. rlwimi r4,r4,16,0,15
  197. addi r6,r3,-4
  198. cmplwi 0,r5,4
  199. blt 7f
  200. stwu r4,4(r6)
  201. beqlr
  202. andi. r0,r6,3
  203. add r5,r0,r5
  204. subf r6,r0,r6
  205. srwi r0,r5,2
  206. mtctr r0
  207. bdz 6f
  208. 1: stwu r4,4(r6)
  209. bdnz 1b
  210. 6: andi. r5,r5,3
  211. 7: cmpwi 0,r5,0
  212. beqlr
  213. mtctr r5
  214. addi r6,r6,3
  215. 8: stbu r4,1(r6)
  216. bdnz 8b
  217. blr
  218. /*
  219. * This version uses dcbz on the complete cache lines in the
  220. * destination area to reduce memory traffic. This requires that
  221. * the destination area is cacheable.
  222. * We only use this version if the source and dest don't overlap.
  223. * -- paulus.
  224. */
  225. _GLOBAL(cacheable_memcpy)
  226. add r7,r3,r5 /* test if the src & dst overlap */
  227. add r8,r4,r5
  228. cmplw 0,r4,r7
  229. cmplw 1,r3,r8
  230. crand 0,0,4 /* cr0.lt &= cr1.lt */
  231. blt memcpy /* if regions overlap */
  232. addi r4,r4,-4
  233. addi r6,r3,-4
  234. neg r0,r3
  235. andi. r0,r0,CACHELINE_MASK /* # bytes to start of cache line */
  236. beq 58f
  237. cmplw 0,r5,r0 /* is this more than total to do? */
  238. blt 63f /* if not much to do */
  239. andi. r8,r0,3 /* get it word-aligned first */
  240. subf r5,r0,r5
  241. mtctr r8
  242. beq+ 61f
  243. 70: lbz r9,4(r4) /* do some bytes */
  244. stb r9,4(r6)
  245. addi r4,r4,1
  246. addi r6,r6,1
  247. bdnz 70b
  248. 61: srwi. r0,r0,2
  249. mtctr r0
  250. beq 58f
  251. 72: lwzu r9,4(r4) /* do some words */
  252. stwu r9,4(r6)
  253. bdnz 72b
  254. 58: srwi. r0,r5,LG_CACHELINE_BYTES /* # complete cachelines */
  255. clrlwi r5,r5,32-LG_CACHELINE_BYTES
  256. li r11,4
  257. mtctr r0
  258. beq 63f
  259. 53:
  260. #if !defined(CONFIG_8xx)
  261. dcbz r11,r6
  262. #endif
  263. COPY_16_BYTES
  264. #if L1_CACHE_BYTES >= 32
  265. COPY_16_BYTES
  266. #if L1_CACHE_BYTES >= 64
  267. COPY_16_BYTES
  268. COPY_16_BYTES
  269. #if L1_CACHE_BYTES >= 128
  270. COPY_16_BYTES
  271. COPY_16_BYTES
  272. COPY_16_BYTES
  273. COPY_16_BYTES
  274. #endif
  275. #endif
  276. #endif
  277. bdnz 53b
  278. 63: srwi. r0,r5,2
  279. mtctr r0
  280. beq 64f
  281. 30: lwzu r0,4(r4)
  282. stwu r0,4(r6)
  283. bdnz 30b
  284. 64: andi. r0,r5,3
  285. mtctr r0
  286. beq+ 65f
  287. 40: lbz r0,4(r4)
  288. stb r0,4(r6)
  289. addi r4,r4,1
  290. addi r6,r6,1
  291. bdnz 40b
  292. 65: blr
  293. _GLOBAL(memmove)
  294. cmplw 0,r3,r4
  295. bgt backwards_memcpy
  296. /* fall through */
  297. _GLOBAL(memcpy)
  298. srwi. r7,r5,3
  299. addi r6,r3,-4
  300. addi r4,r4,-4
  301. beq 2f /* if less than 8 bytes to do */
  302. andi. r0,r6,3 /* get dest word aligned */
  303. mtctr r7
  304. bne 5f
  305. 1: lwz r7,4(r4)
  306. lwzu r8,8(r4)
  307. stw r7,4(r6)
  308. stwu r8,8(r6)
  309. bdnz 1b
  310. andi. r5,r5,7
  311. 2: cmplwi 0,r5,4
  312. blt 3f
  313. lwzu r0,4(r4)
  314. addi r5,r5,-4
  315. stwu r0,4(r6)
  316. 3: cmpwi 0,r5,0
  317. beqlr
  318. mtctr r5
  319. addi r4,r4,3
  320. addi r6,r6,3
  321. 4: lbzu r0,1(r4)
  322. stbu r0,1(r6)
  323. bdnz 4b
  324. blr
  325. 5: subfic r0,r0,4
  326. mtctr r0
  327. 6: lbz r7,4(r4)
  328. addi r4,r4,1
  329. stb r7,4(r6)
  330. addi r6,r6,1
  331. bdnz 6b
  332. subf r5,r0,r5
  333. rlwinm. r7,r5,32-3,3,31
  334. beq 2b
  335. mtctr r7
  336. b 1b
  337. _GLOBAL(backwards_memcpy)
  338. rlwinm. r7,r5,32-3,3,31 /* r0 = r5 >> 3 */
  339. add r6,r3,r5
  340. add r4,r4,r5
  341. beq 2f
  342. andi. r0,r6,3
  343. mtctr r7
  344. bne 5f
  345. 1: lwz r7,-4(r4)
  346. lwzu r8,-8(r4)
  347. stw r7,-4(r6)
  348. stwu r8,-8(r6)
  349. bdnz 1b
  350. andi. r5,r5,7
  351. 2: cmplwi 0,r5,4
  352. blt 3f
  353. lwzu r0,-4(r4)
  354. subi r5,r5,4
  355. stwu r0,-4(r6)
  356. 3: cmpwi 0,r5,0
  357. beqlr
  358. mtctr r5
  359. 4: lbzu r0,-1(r4)
  360. stbu r0,-1(r6)
  361. bdnz 4b
  362. blr
  363. 5: mtctr r0
  364. 6: lbzu r7,-1(r4)
  365. stbu r7,-1(r6)
  366. bdnz 6b
  367. subf r5,r0,r5
  368. rlwinm. r7,r5,32-3,3,31
  369. beq 2b
  370. mtctr r7
  371. b 1b
  372. _GLOBAL(memcmp)
  373. cmpwi 0,r5,0
  374. ble- 2f
  375. mtctr r5
  376. addi r6,r3,-1
  377. addi r4,r4,-1
  378. 1: lbzu r3,1(r6)
  379. lbzu r0,1(r4)
  380. subf. r3,r0,r3
  381. bdnzt 2,1b
  382. blr
  383. 2: li r3,0
  384. blr
  385. _GLOBAL(memchr)
  386. cmpwi 0,r5,0
  387. ble- 2f
  388. mtctr r5
  389. addi r3,r3,-1
  390. 1: lbzu r0,1(r3)
  391. cmpw 0,r0,r4
  392. bdnzf 2,1b
  393. beqlr
  394. 2: li r3,0
  395. blr
  396. _GLOBAL(__copy_tofrom_user)
  397. addi r4,r4,-4
  398. addi r6,r3,-4
  399. neg r0,r3
  400. andi. r0,r0,CACHELINE_MASK /* # bytes to start of cache line */
  401. beq 58f
  402. cmplw 0,r5,r0 /* is this more than total to do? */
  403. blt 63f /* if not much to do */
  404. andi. r8,r0,3 /* get it word-aligned first */
  405. mtctr r8
  406. beq+ 61f
  407. 70: lbz r9,4(r4) /* do some bytes */
  408. 71: stb r9,4(r6)
  409. addi r4,r4,1
  410. addi r6,r6,1
  411. bdnz 70b
  412. 61: subf r5,r0,r5
  413. srwi. r0,r0,2
  414. mtctr r0
  415. beq 58f
  416. 72: lwzu r9,4(r4) /* do some words */
  417. 73: stwu r9,4(r6)
  418. bdnz 72b
  419. .section __ex_table,"a"
  420. .align 2
  421. .long 70b,100f
  422. .long 71b,101f
  423. .long 72b,102f
  424. .long 73b,103f
  425. .text
  426. 58: srwi. r0,r5,LG_CACHELINE_BYTES /* # complete cachelines */
  427. clrlwi r5,r5,32-LG_CACHELINE_BYTES
  428. li r11,4
  429. beq 63f
  430. #ifdef CONFIG_8xx
  431. /* Don't use prefetch on 8xx */
  432. mtctr r0
  433. li r0,0
  434. 53: COPY_16_BYTES_WITHEX(0)
  435. bdnz 53b
  436. #else /* not CONFIG_8xx */
  437. /* Here we decide how far ahead to prefetch the source */
  438. li r3,4
  439. cmpwi r0,1
  440. li r7,0
  441. ble 114f
  442. li r7,1
  443. #if MAX_COPY_PREFETCH > 1
  444. /* Heuristically, for large transfers we prefetch
  445. MAX_COPY_PREFETCH cachelines ahead. For small transfers
  446. we prefetch 1 cacheline ahead. */
  447. cmpwi r0,MAX_COPY_PREFETCH
  448. ble 112f
  449. li r7,MAX_COPY_PREFETCH
  450. 112: mtctr r7
  451. 111: dcbt r3,r4
  452. addi r3,r3,CACHELINE_BYTES
  453. bdnz 111b
  454. #else
  455. dcbt r3,r4
  456. addi r3,r3,CACHELINE_BYTES
  457. #endif /* MAX_COPY_PREFETCH > 1 */
  458. 114: subf r8,r7,r0
  459. mr r0,r7
  460. mtctr r8
  461. 53: dcbt r3,r4
  462. 54: dcbz r11,r6
  463. .section __ex_table,"a"
  464. .align 2
  465. .long 54b,105f
  466. .text
  467. /* the main body of the cacheline loop */
  468. COPY_16_BYTES_WITHEX(0)
  469. #if L1_CACHE_BYTES >= 32
  470. COPY_16_BYTES_WITHEX(1)
  471. #if L1_CACHE_BYTES >= 64
  472. COPY_16_BYTES_WITHEX(2)
  473. COPY_16_BYTES_WITHEX(3)
  474. #if L1_CACHE_BYTES >= 128
  475. COPY_16_BYTES_WITHEX(4)
  476. COPY_16_BYTES_WITHEX(5)
  477. COPY_16_BYTES_WITHEX(6)
  478. COPY_16_BYTES_WITHEX(7)
  479. #endif
  480. #endif
  481. #endif
  482. bdnz 53b
  483. cmpwi r0,0
  484. li r3,4
  485. li r7,0
  486. bne 114b
  487. #endif /* CONFIG_8xx */
  488. 63: srwi. r0,r5,2
  489. mtctr r0
  490. beq 64f
  491. 30: lwzu r0,4(r4)
  492. 31: stwu r0,4(r6)
  493. bdnz 30b
  494. 64: andi. r0,r5,3
  495. mtctr r0
  496. beq+ 65f
  497. 40: lbz r0,4(r4)
  498. 41: stb r0,4(r6)
  499. addi r4,r4,1
  500. addi r6,r6,1
  501. bdnz 40b
  502. 65: li r3,0
  503. blr
  504. /* read fault, initial single-byte copy */
  505. 100: li r9,0
  506. b 90f
  507. /* write fault, initial single-byte copy */
  508. 101: li r9,1
  509. 90: subf r5,r8,r5
  510. li r3,0
  511. b 99f
  512. /* read fault, initial word copy */
  513. 102: li r9,0
  514. b 91f
  515. /* write fault, initial word copy */
  516. 103: li r9,1
  517. 91: li r3,2
  518. b 99f
  519. /*
  520. * this stuff handles faults in the cacheline loop and branches to either
  521. * 104f (if in read part) or 105f (if in write part), after updating r5
  522. */
  523. COPY_16_BYTES_EXCODE(0)
  524. #if L1_CACHE_BYTES >= 32
  525. COPY_16_BYTES_EXCODE(1)
  526. #if L1_CACHE_BYTES >= 64
  527. COPY_16_BYTES_EXCODE(2)
  528. COPY_16_BYTES_EXCODE(3)
  529. #if L1_CACHE_BYTES >= 128
  530. COPY_16_BYTES_EXCODE(4)
  531. COPY_16_BYTES_EXCODE(5)
  532. COPY_16_BYTES_EXCODE(6)
  533. COPY_16_BYTES_EXCODE(7)
  534. #endif
  535. #endif
  536. #endif
  537. /* read fault in cacheline loop */
  538. 104: li r9,0
  539. b 92f
  540. /* fault on dcbz (effectively a write fault) */
  541. /* or write fault in cacheline loop */
  542. 105: li r9,1
  543. 92: li r3,LG_CACHELINE_BYTES
  544. mfctr r8
  545. add r0,r0,r8
  546. b 106f
  547. /* read fault in final word loop */
  548. 108: li r9,0
  549. b 93f
  550. /* write fault in final word loop */
  551. 109: li r9,1
  552. 93: andi. r5,r5,3
  553. li r3,2
  554. b 99f
  555. /* read fault in final byte loop */
  556. 110: li r9,0
  557. b 94f
  558. /* write fault in final byte loop */
  559. 111: li r9,1
  560. 94: li r5,0
  561. li r3,0
  562. /*
  563. * At this stage the number of bytes not copied is
  564. * r5 + (ctr << r3), and r9 is 0 for read or 1 for write.
  565. */
  566. 99: mfctr r0
  567. 106: slw r3,r0,r3
  568. add. r3,r3,r5
  569. beq 120f /* shouldn't happen */
  570. cmpwi 0,r9,0
  571. bne 120f
  572. /* for a read fault, first try to continue the copy one byte at a time */
  573. mtctr r3
  574. 130: lbz r0,4(r4)
  575. 131: stb r0,4(r6)
  576. addi r4,r4,1
  577. addi r6,r6,1
  578. bdnz 130b
  579. /* then clear out the destination: r3 bytes starting at 4(r6) */
  580. 132: mfctr r3
  581. srwi. r0,r3,2
  582. li r9,0
  583. mtctr r0
  584. beq 113f
  585. 112: stwu r9,4(r6)
  586. bdnz 112b
  587. 113: andi. r0,r3,3
  588. mtctr r0
  589. beq 120f
  590. 114: stb r9,4(r6)
  591. addi r6,r6,1
  592. bdnz 114b
  593. 120: blr
  594. .section __ex_table,"a"
  595. .align 2
  596. .long 30b,108b
  597. .long 31b,109b
  598. .long 40b,110b
  599. .long 41b,111b
  600. .long 130b,132b
  601. .long 131b,120b
  602. .long 112b,120b
  603. .long 114b,120b
  604. .text
  605. _GLOBAL(__clear_user)
  606. addi r6,r3,-4
  607. li r3,0
  608. li r5,0
  609. cmplwi 0,r4,4
  610. blt 7f
  611. /* clear a single word */
  612. 11: stwu r5,4(r6)
  613. beqlr
  614. /* clear word sized chunks */
  615. andi. r0,r6,3
  616. add r4,r0,r4
  617. subf r6,r0,r6
  618. srwi r0,r4,2
  619. andi. r4,r4,3
  620. mtctr r0
  621. bdz 7f
  622. 1: stwu r5,4(r6)
  623. bdnz 1b
  624. /* clear byte sized chunks */
  625. 7: cmpwi 0,r4,0
  626. beqlr
  627. mtctr r4
  628. addi r6,r6,3
  629. 8: stbu r5,1(r6)
  630. bdnz 8b
  631. blr
  632. 90: mr r3,r4
  633. blr
  634. 91: mfctr r3
  635. slwi r3,r3,2
  636. add r3,r3,r4
  637. blr
  638. 92: mfctr r3
  639. blr
  640. .section __ex_table,"a"
  641. .align 2
  642. .long 11b,90b
  643. .long 1b,91b
  644. .long 8b,92b
  645. .text
  646. _GLOBAL(__strncpy_from_user)
  647. addi r6,r3,-1
  648. addi r4,r4,-1
  649. cmpwi 0,r5,0
  650. beq 2f
  651. mtctr r5
  652. 1: lbzu r0,1(r4)
  653. cmpwi 0,r0,0
  654. stbu r0,1(r6)
  655. bdnzf 2,1b /* dec ctr, branch if ctr != 0 && !cr0.eq */
  656. beq 3f
  657. 2: addi r6,r6,1
  658. 3: subf r3,r3,r6
  659. blr
  660. 99: li r3,-EFAULT
  661. blr
  662. .section __ex_table,"a"
  663. .align 2
  664. .long 1b,99b
  665. .text
  666. /* r3 = str, r4 = len (> 0), r5 = top (highest addr) */
  667. _GLOBAL(__strnlen_user)
  668. addi r7,r3,-1
  669. subf r6,r7,r5 /* top+1 - str */
  670. cmplw 0,r4,r6
  671. bge 0f
  672. mr r6,r4
  673. 0: mtctr r6 /* ctr = min(len, top - str) */
  674. 1: lbzu r0,1(r7) /* get next byte */
  675. cmpwi 0,r0,0
  676. bdnzf 2,1b /* loop if --ctr != 0 && byte != 0 */
  677. addi r7,r7,1
  678. subf r3,r3,r7 /* number of bytes we have looked at */
  679. beqlr /* return if we found a 0 byte */
  680. cmpw 0,r3,r4 /* did we look at all len bytes? */
  681. blt 99f /* if not, must have hit top */
  682. addi r3,r4,1 /* return len + 1 to indicate no null found */
  683. blr
  684. 99: li r3,0 /* bad address, return 0 */
  685. blr
  686. .section __ex_table,"a"
  687. .align 2
  688. .long 1b,99b