/*
 * String handling functions for PowerPC.
 *
 * Copyright (C) 1996 Paul Mackerras.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#include <linux/config.h>
#include <asm/processor.h>
#include <asm/cache.h>
#include <asm/errno.h>
#include <asm/ppc_asm.h>

/*
 * Copy 16 bytes from 4(r4) to 4(r6).  The update forms (lwzu/stwu)
 * leave r4 and r6 advanced by 16, ready for the next invocation.
 * Clobbers r7-r10.
 */
#define COPY_16_BYTES		\
	lwz	r7,4(r4);	\
	lwz	r8,8(r4);	\
	lwz	r9,12(r4);	\
	lwzu	r10,16(r4);	\
	stw	r7,4(r6);	\
	stw	r8,8(r6);	\
	stw	r9,12(r6);	\
	stwu	r10,16(r6)
/*
 * As COPY_16_BYTES, but every load/store carries a numeric label
 * 8<n><i> (i = 0..7) so that COPY_16_BYTES_EXCODE(n) can register
 * exception-table fixups for each access.  Used by __copy_tofrom_user.
 * Clobbers r7-r10; advances r4 and r6 by 16.
 */
#define COPY_16_BYTES_WITHEX(n)	\
8 ## n ## 0:			\
	lwz	r7,4(r4);	\
8 ## n ## 1:			\
	lwz	r8,8(r4);	\
8 ## n ## 2:			\
	lwz	r9,12(r4);	\
8 ## n ## 3:			\
	lwzu	r10,16(r4);	\
8 ## n ## 4:			\
	stw	r7,4(r6);	\
8 ## n ## 5:			\
	stw	r8,8(r6);	\
8 ## n ## 6:			\
	stw	r9,12(r6);	\
8 ## n ## 7:			\
	stwu	r10,16(r6)
/*
 * Fault-fixup code paired with COPY_16_BYTES_WITHEX(n).  Label 9<n>0
 * handles a fault in one of the loads (branches to 104f), 9<n>1 a
 * fault in one of the stores (branches to 105f); both first adjust the
 * residual byte count in r5 by -16*n for the macro instance in which
 * the fault occurred.  The __ex_table entries map each 8<n><i> access
 * to the appropriate fixup label.
 */
#define COPY_16_BYTES_EXCODE(n)		\
9 ## n ## 0:				\
	addi	r5,r5,-(16 * n);	\
	b	104f;			\
9 ## n ## 1:				\
	addi	r5,r5,-(16 * n);	\
	b	105f;			\
	.section __ex_table,"a";	\
	.align	2;			\
	.long	8 ## n ## 0b,9 ## n ## 0b;	\
	.long	8 ## n ## 1b,9 ## n ## 0b;	\
	.long	8 ## n ## 2b,9 ## n ## 0b;	\
	.long	8 ## n ## 3b,9 ## n ## 0b;	\
	.long	8 ## n ## 4b,9 ## n ## 1b;	\
	.long	8 ## n ## 5b,9 ## n ## 1b;	\
	.long	8 ## n ## 6b,9 ## n ## 1b;	\
	.long	8 ## n ## 7b,9 ## n ## 1b;	\
	.text
	.text

	/* stabs debug entries naming the source directory and file */
	.stabs	"arch/ppc/lib/",N_SO,0,0,0f
	.stabs	"string.S",N_SO,0,0,0f

/* Cache-line geometry (from <asm/cache.h>), used by the dcbz paths below */
CACHELINE_BYTES = L1_CACHE_BYTES
LG_CACHELINE_BYTES = L1_CACHE_SHIFT
CACHELINE_MASK = (L1_CACHE_BYTES-1)
/*
 * char *strcpy(char *dest [r3], const char *src [r4])
 * Copies the NUL-terminated string at src to dest, including the NUL.
 * Pointers are pre-decremented so the lbzu/stbu update forms can be
 * used in the loop; r3 is untouched and so returns dest.
 */
_GLOBAL(strcpy)
	addi	r5,r3,-1
	addi	r4,r4,-1
1:	lbzu	r0,1(r4)	/* r0 = *++src */
	cmpwi	0,r0,0		/* was that the terminating NUL? */
	stbu	r0,1(r5)	/* *++dst = r0 (the NUL is copied too) */
	bne	1b
	blr
/* This clears out any unused part of the destination buffer,
   just as the libc version does. -- paulus */
/*
 * char *strncpy(char *dest [r3], const char *src [r4], size_t n [r5])
 * Copies at most n bytes; if src is shorter than n, the remainder of
 * dest is filled with NUL bytes.  r3 (dest) is preserved and returned.
 */
_GLOBAL(strncpy)
	cmpwi	0,r5,0		/* n == 0: nothing to do */
	beqlr
	mtctr	r5
	addi	r6,r3,-1
	addi	r4,r4,-1
1:	lbzu	r0,1(r4)	/* r0 = *++src */
	cmpwi	0,r0,0
	stbu	r0,1(r6)	/* *++dst = r0 */
	bdnzf	2,1b		/* dec ctr, branch if ctr != 0 && !cr0.eq */
	bnelr			/* if we didn't hit a null char, we're done */
	mfctr	r5
	cmpwi	0,r5,0		/* any space left in destination buffer? */
	beqlr			/* we know r0 == 0 here */
2:	stbu	r0,1(r6)	/* clear it out if so */
	bdnz	2b
	blr
/*
 * char *strcat(char *dest [r3], const char *src [r4])
 * Appends the string at src (including its NUL) to the end of dest.
 * r3 is untouched and so returns dest.
 */
_GLOBAL(strcat)
	addi	r5,r3,-1
	addi	r4,r4,-1
1:	lbzu	r0,1(r5)	/* scan forward for the end of dest */
	cmpwi	0,r0,0
	bne	1b
	addi	r5,r5,-1	/* back up onto the NUL so it is overwritten */
1:	lbzu	r0,1(r4)	/* then copy src, as in strcpy */
	cmpwi	0,r0,0
	stbu	r0,1(r5)
	bne	1b
	blr
/*
 * int strcmp(const char *s1 [r3], const char *s2 [r4])
 * Returns in r3 the difference of the first differing byte pair
 * (*s1 - *s2), or 0 if the strings are equal.
 */
_GLOBAL(strcmp)
	addi	r5,r3,-1	/* r5 walks s1 (r3 is reused for the result) */
	addi	r4,r4,-1
1:	lbzu	r3,1(r5)	/* r3 = *++s1 */
	cmpwi	1,r3,0		/* cr1: did s1 end? */
	lbzu	r0,1(r4)	/* r0 = *++s2 */
	subf.	r3,r0,r3	/* r3 = *s1 - *s2, cr0 set on the result */
	beqlr	1		/* s1 ended: return difference (0 if s2 ended too) */
	beq	1b		/* bytes equal and not NUL: keep going */
	blr			/* bytes differ: return the difference */
/*
 * size_t strlen(const char *s [r3])
 * Returns in r3 the length of the string, not counting the NUL.
 */
_GLOBAL(strlen)
	addi	r4,r4+0,-1	/* NOTE(review): see below -- kept byte-identical */
/*
 * Use dcbz on the complete cache lines in the destination
 * to set them to zero.  This requires that the destination
 * area is cacheable. -- paulus
 */
/*
 * void cacheable_memzero(void *p [r3], unsigned long n [r4])
 * Like memset(p, 0, n), but zeroes whole cache lines with dcbz
 * (except on 8xx, which stores word by word instead).
 */
_GLOBAL(cacheable_memzero)
	mr	r5,r4		/* r5 = byte count */
	li	r4,0		/* r4 = zero word to store */
	addi	r6,r3,-4
	cmplwi	0,r5,4
	blt	7f		/* fewer than 4 bytes: byte loop only */
	stwu	r4,4(r6)	/* store the first word */
	beqlr			/* exactly 4 bytes: done */
	andi.	r0,r6,3		/* r0 = word misalignment of the pointer */
	add	r5,r0,r5	/* widen the count to cover the realigned start */
	subf	r6,r0,r6	/* word-align the pointer */
	clrlwi	r7,r6,32-LG_CACHELINE_BYTES	/* r7 = offset within cache line */
	add	r8,r7,r5
	srwi	r9,r8,LG_CACHELINE_BYTES
	addic.	r9,r9,-1	/* total number of complete cachelines */
	ble	2f		/* none: fall back to the word loop */
	xori	r0,r7,CACHELINE_MASK & ~3
	srwi.	r0,r0,2		/* r0 = words to reach the next line boundary */
	beq	3f
	mtctr	r0
4:	stwu	r4,4(r6)	/* store words until cacheline-aligned */
	bdnz	4b
3:	mtctr	r9
	li	r7,4
#if !defined(CONFIG_8xx)
10:	dcbz	r7,r6		/* zero an entire cache line at once */
#else
	/* no dcbz path on 8xx: store the line word by word */
10:	stw	r4, 4(r6)
	stw	r4, 8(r6)
	stw	r4, 12(r6)
	stw	r4, 16(r6)
#if CACHE_LINE_SIZE >= 32
	stw	r4, 20(r6)
	stw	r4, 24(r6)
	stw	r4, 28(r6)
	stw	r4, 32(r6)
#endif /* CACHE_LINE_SIZE */
#endif
	addi	r6,r6,CACHELINE_BYTES
	bdnz	10b
	clrlwi	r5,r8,32-LG_CACHELINE_BYTES	/* r5 = residue after the lines */
	addi	r5,r5,4
2:	srwi	r0,r5,2		/* r0 = remaining whole words */
	mtctr	r0
	bdz	6f
1:	stwu	r4,4(r6)
	bdnz	1b
6:	andi.	r5,r5,3		/* r5 = trailing bytes (0..3) */
7:	cmpwi	0,r5,0
	beqlr
	mtctr	r5
	addi	r6,r6,3
8:	stbu	r4,1(r6)	/* store trailing bytes one at a time */
	bdnz	8b
	blr
/*
 * void *memset(void *p [r3], int c [r4], size_t n [r5])
 * r3 is untouched and so returns p.
 */
_GLOBAL(memset)
	rlwimi	r4,r4,8,16,23	/* replicate the low byte of c into byte 2 */
	rlwimi	r4,r4,16,0,15	/* ... and the low halfword into the top half */
	addi	r6,r3,-4
	cmplwi	0,r5,4
	blt	7f		/* fewer than 4 bytes: byte loop only */
	stwu	r4,4(r6)	/* store the first word */
	beqlr			/* exactly 4 bytes: done */
	andi.	r0,r6,3		/* word-align the pointer */
	add	r5,r0,r5
	subf	r6,r0,r6
	srwi	r0,r5,2		/* r0 = number of whole words */
	mtctr	r0
	bdz	6f
1:	stwu	r4,4(r6)
	bdnz	1b
6:	andi.	r5,r5,3		/* r5 = trailing bytes (0..3) */
7:	cmpwi	0,r5,0
	beqlr
	mtctr	r5
	addi	r6,r6,3
8:	stbu	r4,1(r6)
	bdnz	8b
	blr
/*
 * This version uses dcbz on the complete cache lines in the
 * destination area to reduce memory traffic.  This requires that
 * the destination area is cacheable.
 * We only use this version if the source and dest don't overlap.
 * -- paulus.
 */
/*
 * void *cacheable_memcpy(void *to [r3], const void *from [r4],
 *			  size_t n [r5])
 * Falls back to plain memcpy when the regions overlap.
 */
_GLOBAL(cacheable_memcpy)
	add	r7,r3,r5		/* test if the src & dst overlap */
	add	r8,r4,r5
	cmplw	0,r4,r7
	cmplw	1,r3,r8
	crand	0,0,4		/* cr0.lt &= cr1.lt */
	blt	memcpy		/* if regions overlap */
	addi	r4,r4,-4
	addi	r6,r3,-4
	neg	r0,r3
	andi.	r0,r0,CACHELINE_MASK	/* # bytes to start of cache line */
	beq	58f
	cmplw	0,r5,r0			/* is this more than total to do? */
	blt	63f			/* if not much to do */
	andi.	r8,r0,3			/* get it word-aligned first */
	subf	r5,r0,r5
	mtctr	r8
	beq+	61f
70:	lbz	r9,4(r4)		/* do some bytes */
	stb	r9,4(r6)
	addi	r4,r4,1
	addi	r6,r6,1
	bdnz	70b
61:	srwi.	r0,r0,2
	mtctr	r0
	beq	58f
72:	lwzu	r9,4(r4)		/* do some words */
	stwu	r9,4(r6)
	bdnz	72b
58:	srwi.	r0,r5,LG_CACHELINE_BYTES /* # complete cachelines */
	clrlwi	r5,r5,32-LG_CACHELINE_BYTES
	li	r11,4
	mtctr	r0
	beq	63f
53:
#if !defined(CONFIG_8xx)
	dcbz	r11,r6		/* pre-zero the destination line (no RFO) */
#endif
	/* copy one full cache line per iteration */
	COPY_16_BYTES
#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES
#endif
#endif
#endif
	bdnz	53b
63:	srwi.	r0,r5,2		/* remaining whole words */
	mtctr	r0
	beq	64f
30:	lwzu	r0,4(r4)
	stwu	r0,4(r6)
	bdnz	30b
64:	andi.	r0,r5,3		/* remaining bytes (0..3) */
	mtctr	r0
	beq+	65f
40:	lbz	r0,4(r4)
	stb	r0,4(r6)
	addi	r4,r4,1
	addi	r6,r6,1
	bdnz	40b
65:	blr			/* r3 still holds the original dest */
/*
 * void *memmove(void *to [r3], const void *from [r4], size_t n [r5])
 * Copies backwards when dest > src (so overlapping regions are safe);
 * otherwise falls straight through into memcpy below.
 */
_GLOBAL(memmove)
	cmplw	0,r3,r4
	bgt	backwards_memcpy
	/* fall through */
/*
 * void *memcpy(void *to [r3], const void *from [r4], size_t n [r5])
 * Forward copy, 8 bytes per loop iteration once the destination is
 * word-aligned.  r3 is untouched and so returns to.
 */
_GLOBAL(memcpy)
	srwi.	r7,r5,3		/* r7 = n / 8 */
	addi	r6,r3,-4
	addi	r4,r4,-4
	beq	2f			/* if less than 8 bytes to do */
	andi.	r0,r6,3			/* get dest word aligned */
	mtctr	r7
	bne	5f
1:	lwz	r7,4(r4)	/* main loop: two words per iteration */
	lwzu	r8,8(r4)
	stw	r7,4(r6)
	stwu	r8,8(r6)
	bdnz	1b
	andi.	r5,r5,7		/* r5 = leftover bytes (< 8) */
2:	cmplwi	0,r5,4
	blt	3f
	lwzu	r0,4(r4)	/* copy one more word if >= 4 bytes left */
	addi	r5,r5,-4
	stwu	r0,4(r6)
3:	cmpwi	0,r5,0
	beqlr
	mtctr	r5		/* trailing bytes, one at a time */
	addi	r4,r4,3
	addi	r6,r6,3
4:	lbzu	r0,1(r4)
	stbu	r0,1(r6)
	bdnz	4b
	blr
5:	subfic	r0,r0,4		/* r0 = bytes needed to align dest (1..3) */
	mtctr	r0
6:	lbz	r7,4(r4)
	addi	r4,r4,1
	stb	r7,4(r6)
	addi	r6,r6,1
	bdnz	6b
	subf	r5,r0,r5
	rlwinm.	r7,r5,32-3,3,31	/* r7 = remaining / 8 */
	beq	2b
	mtctr	r7
	b	1b
/*
 * Copy r5 bytes from r4 to r3, starting at the top and working down;
 * used by memmove when dest > src (possibly overlapping regions).
 * r3 is untouched and so returns dest.
 */
_GLOBAL(backwards_memcpy)
	rlwinm.	r7,r5,32-3,3,31	/* r7 = r5 >> 3 */
	add	r6,r3,r5	/* r6 = one past the end of dest */
	add	r4,r4,r5	/* r4 = one past the end of src */
	beq	2f		/* fewer than 8 bytes */
	andi.	r0,r6,3
	mtctr	r7
	bne	5f		/* end of dest not word-aligned */
1:	lwz	r7,-4(r4)	/* main loop: two words per iteration */
	lwzu	r8,-8(r4)
	stw	r7,-4(r6)
	stwu	r8,-8(r6)
	bdnz	1b
	andi.	r5,r5,7		/* r5 = leftover bytes (< 8) */
2:	cmplwi	0,r5,4
	blt	3f
	lwzu	r0,-4(r4)
	subi	r5,r5,4
	stwu	r0,-4(r6)
3:	cmpwi	0,r5,0
	beqlr
	mtctr	r5		/* trailing bytes, one at a time */
4:	lbzu	r0,-1(r4)
	stbu	r0,-1(r6)
	bdnz	4b
	blr
5:	mtctr	r0		/* copy r0 bytes to word-align the end */
6:	lbzu	r7,-1(r4)
	stbu	r7,-1(r6)
	bdnz	6b
	subf	r5,r0,r5
	rlwinm.	r7,r5,32-3,3,31	/* r7 = remaining / 8 */
	beq	2b
	mtctr	r7
	b	1b
/*
 * int memcmp(const void *s1 [r3], const void *s2 [r4], size_t n [r5])
 * Returns in r3 the difference of the first differing byte pair
 * (*s1 - *s2), or 0 if the n bytes are equal or n <= 0.
 */
_GLOBAL(memcmp)
	cmpwi	0,r5,0
	ble-	2f		/* n <= 0: return 0 */
	mtctr	r5
	addi	r6,r3,-1
	addi	r4,r4,-1
1:	lbzu	r3,1(r6)	/* r3 = *++s1 */
	lbzu	r0,1(r4)	/* r0 = *++s2 */
	subf.	r3,r0,r3	/* r3 = *s1 - *s2, cr0 set */
	bdnzt	2,1b		/* loop while ctr != 0 and bytes equal */
	blr			/* r3 = 0 if all equal, else the difference */
2:	li	r3,0
	blr
/*
 * void *memchr(const void *s [r3], int c [r4], size_t n [r5])
 * Returns in r3 a pointer to the first byte equal to c, or 0 (NULL)
 * if no match within n bytes (or n <= 0).
 */
_GLOBAL(memchr)
	cmpwi	0,r5,0
	ble-	2f		/* n <= 0: not found */
	mtctr	r5
	addi	r3,r3,-1
1:	lbzu	r0,1(r3)	/* r0 = *++p (r3 walks the buffer) */
	cmpw	0,r0,r4
	bdnzf	2,1b		/* loop while ctr != 0 and byte != c */
	beqlr			/* found: r3 points at the matching byte */
2:	li	r3,0
	blr
/*
 * unsigned long __copy_tofrom_user(void *to [r3], const void *from [r4],
 *				    unsigned long size [r5])
 * Copy with exception-table fixups on both the loads and the stores.
 * Returns in r3 the number of bytes NOT copied (0 on success).
 * Whole cache lines go through dcbz + COPY_16_BYTES_WITHEX, with
 * optional dcbt prefetch of the source (not on 8xx); the fixup code
 * at the bottom reconstructs the residual count from ctr and r5.
 */
_GLOBAL(__copy_tofrom_user)
	addi	r4,r4,-4
	addi	r6,r3,-4
	neg	r0,r3
	andi.	r0,r0,CACHELINE_MASK	/* # bytes to start of cache line */
	beq	58f
	cmplw	0,r5,r0			/* is this more than total to do? */
	blt	63f			/* if not much to do */
	andi.	r8,r0,3			/* get it word-aligned first */
	mtctr	r8
	beq+	61f
70:	lbz	r9,4(r4)		/* do some bytes */
71:	stb	r9,4(r6)
	addi	r4,r4,1
	addi	r6,r6,1
	bdnz	70b
61:	subf	r5,r0,r5
	srwi.	r0,r0,2
	mtctr	r0
	beq	58f
72:	lwzu	r9,4(r4)		/* do some words */
73:	stwu	r9,4(r6)
	bdnz	72b
	/* fixups for the alignment loops above */
	.section __ex_table,"a"
	.align	2
	.long	70b,100f
	.long	71b,101f
	.long	72b,102f
	.long	73b,103f
	.text
58:	srwi.	r0,r5,LG_CACHELINE_BYTES /* # complete cachelines */
	clrlwi	r5,r5,32-LG_CACHELINE_BYTES
	li	r11,4
	beq	63f
#ifdef CONFIG_8xx
	/* Don't use prefetch on 8xx */
	mtctr	r0
	li	r0,0
53:	COPY_16_BYTES_WITHEX(0)
	bdnz	53b
#else /* not CONFIG_8xx */
	/* Here we decide how far ahead to prefetch the source */
	li	r3,4
	cmpwi	r0,1
	li	r7,0
	ble	114f
	li	r7,1
#if MAX_COPY_PREFETCH > 1
	/* Heuristically, for large transfers we prefetch
	   MAX_COPY_PREFETCH cachelines ahead.  For small transfers
	   we prefetch 1 cacheline ahead. */
	cmpwi	r0,MAX_COPY_PREFETCH
	ble	112f
	li	r7,MAX_COPY_PREFETCH
112:	mtctr	r7
111:	dcbt	r3,r4		/* prime the prefetch pipeline */
	addi	r3,r3,CACHELINE_BYTES
	bdnz	111b
#else
	dcbt	r3,r4
	addi	r3,r3,CACHELINE_BYTES
#endif /* MAX_COPY_PREFETCH > 1 */
114:	subf	r8,r7,r0	/* r8 = lines to copy with prefetch running */
	mr	r0,r7		/* r0 = prefetched lines still to drain */
	mtctr	r8
53:	dcbt	r3,r4		/* prefetch a source line ahead */
54:	dcbz	r11,r6		/* pre-zero the destination line */
	/* a fault on dcbz is treated as a write fault */
	.section __ex_table,"a"
	.align	2
	.long	54b,105f
	.text
/* the main body of the cacheline loop */
	COPY_16_BYTES_WITHEX(0)
#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_WITHEX(1)
#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_WITHEX(2)
	COPY_16_BYTES_WITHEX(3)
#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_WITHEX(4)
	COPY_16_BYTES_WITHEX(5)
	COPY_16_BYTES_WITHEX(6)
	COPY_16_BYTES_WITHEX(7)
#endif
#endif
#endif
	bdnz	53b
	cmpwi	r0,0		/* any prefetched lines left to copy? */
	li	r3,4
	li	r7,0
	bne	114b
#endif /* CONFIG_8xx */
63:	srwi.	r0,r5,2		/* remaining whole words */
	mtctr	r0
	beq	64f
30:	lwzu	r0,4(r4)
31:	stwu	r0,4(r6)
	bdnz	30b
64:	andi.	r0,r5,3		/* remaining bytes (0..3) */
	mtctr	r0
	beq+	65f
40:	lbz	r0,4(r4)
41:	stb	r0,4(r6)
	addi	r4,r4,1
	addi	r6,r6,1
	bdnz	40b
65:	li	r3,0		/* success: 0 bytes left uncopied */
	blr
/*
 * Fault-fixup code.  Each entry records in r9 whether the fault was a
 * read (0) or write (1), and arranges r5/r3/ctr so that the residual
 * byte count can be computed at 99/106 below.
 */
/* read fault, initial single-byte copy */
100:	li	r9,0
	b	90f
/* write fault, initial single-byte copy */
101:	li	r9,1
90:	subf	r5,r8,r5
	li	r3,0
	b	99f
/* read fault, initial word copy */
102:	li	r9,0
	b	91f
/* write fault, initial word copy */
103:	li	r9,1
91:	li	r3,2
	b	99f
/*
 * this stuff handles faults in the cacheline loop and branches to either
 * 104f (if in read part) or 105f (if in write part), after updating r5
 */
	COPY_16_BYTES_EXCODE(0)
#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_EXCODE(1)
#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_EXCODE(2)
	COPY_16_BYTES_EXCODE(3)
#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_EXCODE(4)
	COPY_16_BYTES_EXCODE(5)
	COPY_16_BYTES_EXCODE(6)
	COPY_16_BYTES_EXCODE(7)
#endif
#endif
#endif
/* read fault in cacheline loop */
104:	li	r9,0
	b	92f
/* fault on dcbz (effectively a write fault) */
/* or write fault in cacheline loop */
105:	li	r9,1
92:	li	r3,LG_CACHELINE_BYTES
	mfctr	r8
	add	r0,r0,r8	/* lines remaining = drained + counted */
	b	106f
/* read fault in final word loop */
108:	li	r9,0
	b	93f
/* write fault in final word loop */
109:	li	r9,1
93:	andi.	r5,r5,3
	li	r3,2
	b	99f
/* read fault in final byte loop */
110:	li	r9,0
	b	94f
/* write fault in final byte loop */
111:	li	r9,1
94:	li	r5,0
	li	r3,0
/*
 * At this stage the number of bytes not copied is
 * r5 + (ctr << r3), and r9 is 0 for read or 1 for write.
 */
99:	mfctr	r0
106:	slw	r3,r0,r3
	add.	r3,r3,r5	/* r3 = bytes not copied */
	beq	120f		/* shouldn't happen */
	cmpwi	0,r9,0
	bne	120f		/* write fault: just return the count */
/* for a read fault, first try to continue the copy one byte at a time */
	mtctr	r3
130:	lbz	r0,4(r4)
131:	stb	r0,4(r6)
	addi	r4,r4,1
	addi	r6,r6,1
	bdnz	130b
/* then clear out the destination: r3 bytes starting at 4(r6) */
132:	mfctr	r3
	srwi.	r0,r3,2
	li	r9,0
	mtctr	r0
	beq	113f
112:	stwu	r9,4(r6)
	bdnz	112b
113:	andi.	r0,r3,3
	mtctr	r0
	beq	120f
114:	stb	r9,4(r6)
	addi	r6,r6,1
	bdnz	114b
120:	blr
	/* fixups for the final loops and the byte-retry/clear loops */
	.section __ex_table,"a"
	.align	2
	.long	30b,108b
	.long	31b,109b
	.long	40b,110b
	.long	41b,111b
	.long	130b,132b
	.long	131b,120b
	.long	112b,120b
	.long	114b,120b
	.text
/*
 * unsigned long __clear_user(void *addr [r3], unsigned long n [r4])
 * Zero n bytes of user memory, with exception-table fixups.
 * Returns in r3 the number of bytes that could NOT be cleared
 * (0 on success).
 */
_GLOBAL(__clear_user)
	addi	r6,r3,-4
	li	r3,0		/* return value: assume success */
	li	r5,0		/* r5 = zero word to store */
	cmplwi	0,r4,4
	blt	7f
	/* clear a single word */
11:	stwu	r5,4(r6)
	beqlr			/* exactly 4 bytes: done */
	/* clear word sized chunks */
	andi.	r0,r6,3		/* word-align the pointer */
	add	r4,r0,r4
	subf	r6,r0,r6
	srwi	r0,r4,2		/* r0 = whole words */
	andi.	r4,r4,3		/* r4 = trailing bytes */
	mtctr	r0
	bdz	7f
1:	stwu	r5,4(r6)
	bdnz	1b
	/* clear byte sized chunks */
7:	cmpwi	0,r4,0
	beqlr
	mtctr	r4
	addi	r6,r6,3
8:	stbu	r5,1(r6)
	bdnz	8b
	blr
/* fault on the initial word: nothing cleared, all r4 bytes remain */
90:	mr	r3,r4
	blr
/* fault in the word loop: remaining = words left * 4 + byte residue */
91:	mfctr	r3
	slwi	r3,r3,2
	add	r3,r3,r4
	blr
/* fault in the byte loop: remaining = bytes left in ctr */
92:	mfctr	r3
	blr
	.section __ex_table,"a"
	.align	2
	.long	11b,90b
	.long	1b,91b
	.long	8b,92b
	.text
/*
 * long __strncpy_from_user(char *dst [r3], const char *src [r4],
 *			    long count [r5])
 * Copies up to count bytes from user space, stopping after a NUL.
 * Returns in r3 the number of bytes copied not counting the NUL
 * (count if no NUL was found), or -EFAULT on a fault reading src.
 */
_GLOBAL(__strncpy_from_user)
	addi	r6,r3,-1
	addi	r4,r4,-1
	cmpwi	0,r5,0
	beq	2f		/* count == 0: return 0 */
	mtctr	r5
1:	lbzu	r0,1(r4)	/* r0 = *++src (may fault -> 99) */
	cmpwi	0,r0,0
	stbu	r0,1(r6)	/* *++dst = r0 */
	bdnzf	2,1b		/* dec ctr, branch if ctr != 0 && !cr0.eq */
	beq	3f		/* NUL was copied: don't count it */
2:	addi	r6,r6,1
3:	subf	r3,r3,r6	/* r3 = bytes copied (excluding NUL) */
	blr
99:	li	r3,-EFAULT	/* fault reading the userspace source */
	blr
	.section __ex_table,"a"
	.align	2
	.long	1b,99b
	.text
/* r3 = str, r4 = len (> 0), r5 = top (highest addr) */
/*
 * Returns in r3: the string length INCLUDING the terminating NUL;
 * len + 1 if no NUL was found within len bytes; or 0 on a fault /
 * bad address.  Never looks past top.
 */
_GLOBAL(__strnlen_user)
	addi	r7,r3,-1
	subf	r6,r7,r5	/* top+1 - str */
	cmplw	0,r4,r6
	bge	0f
	mr	r6,r4
0:	mtctr	r6		/* ctr = min(len, top - str) */
1:	lbzu	r0,1(r7)	/* get next byte (may fault -> 99) */
	cmpwi	0,r0,0
	bdnzf	2,1b		/* loop if --ctr != 0 && byte != 0 */
	addi	r7,r7,1
	subf	r3,r3,r7	/* number of bytes we have looked at */
	beqlr			/* return if we found a 0 byte */
	cmpw	0,r3,r4		/* did we look at all len bytes? */
	blt	99f		/* if not, must have hit top */
	addi	r3,r4,1		/* return len + 1 to indicate no null found */
	blr
99:	li	r3,0		/* bad address, return 0 */
	blr
	.section __ex_table,"a"
	.align	2
	.long	1b,99b