/*
 * Authors: Hans-Peter Nilsson (hp@axis.com)
 *
 */
#ifndef _CRIS_ARCH_UACCESS_H
#define _CRIS_ARCH_UACCESS_H

/*
 * We don't tell gcc that we are accessing memory, but this is OK
 * because we do not write to any memory gcc knows about, so there
 * are no aliasing issues.
 *
 * Note that PC at a fault is the address *at* the faulting
 * instruction for CRISv32.
 */
#define __put_user_asm(x, addr, err, op) \
	__asm__ __volatile__( \
		"2: "op" %1,[%2]\n" \
		"4:\n" \
		" .section .fixup,\"ax\"\n" \
		"3: move.d %3,%0\n" \
		" jump 4b\n" \
		" nop\n" \
		" .previous\n" \
		" .section __ex_table,\"a\"\n" \
		" .dword 2b,3b\n" \
		" .previous\n" \
		: "=r" (err) \
		: "r" (x), "r" (addr), "g" (-EFAULT), "0" (err))
#define __put_user_asm_64(x, addr, err) do { \
	int dummy_for_put_user_asm_64_; \
	__asm__ __volatile__( \
		"2: move.d %M2,[%1+]\n" \
		"4: move.d %H2,[%1]\n" \
		"5:\n" \
		" .section .fixup,\"ax\"\n" \
		"3: move.d %4,%0\n" \
		" jump 5b\n" \
		" .previous\n" \
		" .section __ex_table,\"a\"\n" \
		" .dword 2b,3b\n" \
		" .dword 4b,3b\n" \
		" .previous\n" \
		: "=r" (err), "=b" (dummy_for_put_user_asm_64_) \
		: "r" (x), "1" (addr), "g" (-EFAULT), \
		  "0" (err)); \
	} while (0)

/* See comment before __put_user_asm. */

#define __get_user_asm(x, addr, err, op) \
	__asm__ __volatile__( \
		"2: "op" [%2],%1\n" \
		"4:\n" \
		" .section .fixup,\"ax\"\n" \
		"3: move.d %3,%0\n" \
		" jump 4b\n" \
		" moveq 0,%1\n" \
		" .previous\n" \
		" .section __ex_table,\"a\"\n" \
		" .dword 2b,3b\n" \
		" .previous\n" \
		: "=r" (err), "=r" (x) \
		: "r" (addr), "g" (-EFAULT), "0" (err))

#define __get_user_asm_64(x, addr, err) do { \
	int dummy_for_get_user_asm_64_; \
	__asm__ __volatile__( \
		"2: move.d [%2+],%M1\n" \
		"4: move.d [%2],%H1\n" \
		"5:\n" \
		" .section .fixup,\"ax\"\n" \
		"3: move.d %4,%0\n" \
		" jump 5b\n" \
		" moveq 0,%1\n" \
		" .previous\n" \
		" .section __ex_table,\"a\"\n" \
		" .dword 2b,3b\n" \
		" .dword 4b,3b\n" \
		" .previous\n" \
		: "=r" (err), "=r" (x), \
		  "=b" (dummy_for_get_user_asm_64_) \
		: "2" (addr), "g" (-EFAULT), "0" (err)); \
	} while (0)
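/*
 * Illustrative sketch, not part of the original header: the "op" string
 * ("move.b", "move.w" or "move.d") and the 64-bit variants are normally
 * selected by a size switch in the generic CRIS <asm/uaccess.h>.  The
 * condensed dispatcher below shows that intended pattern; the exact
 * macro name and the __put_user_bad() link-error fallback are
 * assumptions based on the usual uaccess convention.
 */
#if 0
#define __put_user_size(x, ptr, size, retval) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: __put_user_asm(x, ptr, retval, "move.b"); break; \
	case 2: __put_user_asm(x, ptr, retval, "move.w"); break; \
	case 4: __put_user_asm(x, ptr, retval, "move.d"); break; \
	case 8: __put_user_asm_64(x, ptr, retval); break; \
	default: __put_user_bad(); /* deliberately undefined: link error */ \
	} \
} while (0)
#endif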
/*
 * Copy a null terminated string from userspace.
 *
 * Must return:
 * -EFAULT for an exception
 * count if we hit the buffer limit
 * bytes copied if we hit a null byte
 * (without the null byte)
 */
static inline long
__do_strncpy_from_user(char *dst, const char *src, long count)
{
	long res;

	if (count == 0)
		return 0;

	/*
	 * Currently, in 2.4.0-test9, most ports use a simple byte-copy loop.
	 * So do we.
	 *
	 * This code is deduced from:
	 *
	 *	char tmp2;
	 *	long tmp1, tmp3;
	 *	tmp1 = count;
	 *	while ((*dst++ = (tmp2 = *src++)) != 0
	 *	       && --tmp1)
	 *		;
	 *
	 *	res = count - tmp1;
	 *
	 * with tweaks.
	 */
	__asm__ __volatile__ (
		" move.d %3,%0\n"
		"5: move.b [%2+],$acr\n"
		"1: beq 6f\n"
		" move.b $acr,[%1+]\n"
		" subq 1,%0\n"
		"2: bne 1b\n"
		" move.b [%2+],$acr\n"
		"6: sub.d %3,%0\n"
		" neg.d %0,%0\n"
		"3:\n"
		" .section .fixup,\"ax\"\n"
		"4: move.d %7,%0\n"
		" jump 3b\n"
		" nop\n"
		/* The address for a fault at the first move is trivial.
		   The address for a fault at the second move is that of
		   the preceding branch insn, since the move insn is in
		   its delay-slot.  Just so you don't get confused... */
		" .previous\n"
		" .section __ex_table,\"a\"\n"
		" .dword 5b,4b\n"
		" .dword 2b,4b\n"
		" .previous"
		: "=r" (res), "=b" (dst), "=b" (src), "=r" (count)
		: "3" (count), "1" (dst), "2" (src), "g" (-EFAULT)
		: "acr");

	return res;
}
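/*
 * Illustrative sketch, not part of the original header: callers are
 * expected to have validated the user pointer before using the helper
 * above.  The exported strncpy_from_user() in the generic CRIS
 * <asm/uaccess.h> wraps it roughly as below; the access_ok() check and
 * the exact signature are assumptions based on the usual pattern of
 * kernels of this vintage.
 */
#if 0
static inline long
strncpy_from_user(char *dst, const char *src, long count)
{
	if (!access_ok(VERIFY_READ, src, 1))
		return -EFAULT;
	return __do_strncpy_from_user(dst, src, count);
}
#endif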
/* A few copy asms to build up the more complex ones from.
   Note again, a post-increment is performed regardless of whether a bus
   fault occurred in that instruction, and PC for a faulted insn is the
   address for the insn, or for the preceding branch when in a delay-slot. */

#define __asm_copy_user_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm__ __volatile__ ( \
		COPY \
		"1:\n" \
		" .section .fixup,\"ax\"\n" \
		FIXUP \
		" .previous\n" \
		" .section __ex_table,\"a\"\n" \
		TENTRY \
		" .previous\n" \
		: "=b" (to), "=b" (from), "=r" (ret) \
		: "0" (to), "1" (from), "2" (ret) \
		: "acr", "memory")

#define __asm_copy_from_user_1(to, from, ret) \
	__asm_copy_user_cont(to, from, ret, \
		"2: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"3: addq 1,%2\n" \
		" jump 1b\n" \
		" clear.b [%0+]\n", \
		" .dword 2b,3b\n")

#define __asm_copy_from_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_user_cont(to, from, ret, \
		COPY \
		"2: move.w [%1+],$acr\n" \
		" move.w $acr,[%0+]\n", \
		FIXUP \
		"3: addq 2,%2\n" \
		" jump 1b\n" \
		" clear.w [%0+]\n", \
		TENTRY \
		" .dword 2b,3b\n")

#define __asm_copy_from_user_2(to, from, ret) \
	__asm_copy_from_user_2x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_3(to, from, ret) \
	__asm_copy_from_user_2x_cont(to, from, ret, \
		"4: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"5: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 4b,5b\n")

#define __asm_copy_from_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_user_cont(to, from, ret, \
		COPY \
		"2: move.d [%1+],$acr\n" \
		" move.d $acr,[%0+]\n", \
		FIXUP \
		"3: addq 4,%2\n" \
		" jump 1b\n" \
		" clear.d [%0+]\n", \
		TENTRY \
		" .dword 2b,3b\n")

#define __asm_copy_from_user_4(to, from, ret) \
	__asm_copy_from_user_4x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_5(to, from, ret) \
	__asm_copy_from_user_4x_cont(to, from, ret, \
		"4: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"5: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 4b,5b\n")

#define __asm_copy_from_user_6x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_4x_cont(to, from, ret, \
		COPY \
		"4: move.w [%1+],$acr\n" \
		" move.w $acr,[%0+]\n", \
		FIXUP \
		"5: addq 2,%2\n" \
		" clear.w [%0+]\n", \
		TENTRY \
		" .dword 4b,5b\n")

#define __asm_copy_from_user_6(to, from, ret) \
	__asm_copy_from_user_6x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_7(to, from, ret) \
	__asm_copy_from_user_6x_cont(to, from, ret, \
		"6: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"7: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 6b,7b\n")

#define __asm_copy_from_user_8x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_4x_cont(to, from, ret, \
		COPY \
		"4: move.d [%1+],$acr\n" \
		" move.d $acr,[%0+]\n", \
		FIXUP \
		"5: addq 4,%2\n" \
		" clear.d [%0+]\n", \
		TENTRY \
		" .dword 4b,5b\n")

#define __asm_copy_from_user_8(to, from, ret) \
	__asm_copy_from_user_8x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_9(to, from, ret) \
	__asm_copy_from_user_8x_cont(to, from, ret, \
		"6: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"7: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 6b,7b\n")

#define __asm_copy_from_user_10x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_8x_cont(to, from, ret, \
		COPY \
		"6: move.w [%1+],$acr\n" \
		" move.w $acr,[%0+]\n", \
		FIXUP \
		"7: addq 2,%2\n" \
		" clear.w [%0+]\n", \
		TENTRY \
		" .dword 6b,7b\n")

#define __asm_copy_from_user_10(to, from, ret) \
	__asm_copy_from_user_10x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_11(to, from, ret) \
	__asm_copy_from_user_10x_cont(to, from, ret, \
		"8: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"9: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 8b,9b\n")

#define __asm_copy_from_user_12x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_8x_cont(to, from, ret, \
		COPY \
		"6: move.d [%1+],$acr\n" \
		" move.d $acr,[%0+]\n", \
		FIXUP \
		"7: addq 4,%2\n" \
		" clear.d [%0+]\n", \
		TENTRY \
		" .dword 6b,7b\n")

#define __asm_copy_from_user_12(to, from, ret) \
	__asm_copy_from_user_12x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_13(to, from, ret) \
	__asm_copy_from_user_12x_cont(to, from, ret, \
		"8: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"9: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 8b,9b\n")

#define __asm_copy_from_user_14x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_12x_cont(to, from, ret, \
		COPY \
		"8: move.w [%1+],$acr\n" \
		" move.w $acr,[%0+]\n", \
		FIXUP \
		"9: addq 2,%2\n" \
		" clear.w [%0+]\n", \
		TENTRY \
		" .dword 8b,9b\n")

#define __asm_copy_from_user_14(to, from, ret) \
	__asm_copy_from_user_14x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_15(to, from, ret) \
	__asm_copy_from_user_14x_cont(to, from, ret, \
		"10: move.b [%1+],$acr\n" \
		" move.b $acr,[%0+]\n", \
		"11: addq 1,%2\n" \
		" clear.b [%0+]\n", \
		" .dword 10b,11b\n")

#define __asm_copy_from_user_16x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_12x_cont(to, from, ret, \
		COPY \
		"8: move.d [%1+],$acr\n" \
		" move.d $acr,[%0+]\n", \
		FIXUP \
		"9: addq 4,%2\n" \
		" clear.d [%0+]\n", \
		TENTRY \
		" .dword 8b,9b\n")

#define __asm_copy_from_user_16(to, from, ret) \
	__asm_copy_from_user_16x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_20x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_16x_cont(to, from, ret, \
		COPY \
		"10: move.d [%1+],$acr\n" \
		" move.d $acr,[%0+]\n", \
		FIXUP \
		"11: addq 4,%2\n" \
		" clear.d [%0+]\n", \
		TENTRY \
		" .dword 10b,11b\n")

#define __asm_copy_from_user_20(to, from, ret) \
	__asm_copy_from_user_20x_cont(to, from, ret, "", "", "")

#define __asm_copy_from_user_24x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_from_user_20x_cont(to, from, ret, \
		COPY \
		"12: move.d [%1+],$acr\n" \
		" move.d $acr,[%0+]\n", \
		FIXUP \
		"13: addq 4,%2\n" \
		" clear.d [%0+]\n", \
		TENTRY \
		" .dword 12b,13b\n")

#define __asm_copy_from_user_24(to, from, ret) \
	__asm_copy_from_user_24x_cont(to, from, ret, "", "", "")
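/*
 * Illustrative sketch, not part of the original header: the unrolled
 * __asm_copy_from_user_N macros above are meant to be selected for small
 * compile-time-constant sizes, roughly as in the condensed, hypothetical
 * dispatcher below; larger or variable sizes fall back to a generic copy
 * loop (fallback name assumed).  Note that the fixup code both counts
 * the bytes that were not copied (accumulated in "ret") and clears the
 * corresponding destination bytes, so a faulting copy never leaves stale
 * kernel data behind.
 */
#if 0
static inline unsigned long
__constant_copy_from_user(void *to, const void *from, unsigned long n)
{
	unsigned long ret = 0;

	switch (n) {
	case 1:  __asm_copy_from_user_1(to, from, ret);  break;
	case 2:  __asm_copy_from_user_2(to, from, ret);  break;
	case 4:  __asm_copy_from_user_4(to, from, ret);  break;
	case 8:  __asm_copy_from_user_8(to, from, ret);  break;
	case 16: __asm_copy_from_user_16(to, from, ret); break;
	case 24: __asm_copy_from_user_24(to, from, ret); break;
	default: ret = __generic_copy_from_user(to, from, n); break;
	}
	return ret;	/* number of bytes that could not be copied */
}
#endif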
/* And now, the to-user ones. */

#define __asm_copy_to_user_1(to, from, ret) \
	__asm_copy_user_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"2: move.b $acr,[%0+]\n", \
		"3: jump 1b\n" \
		" addq 1,%2\n", \
		" .dword 2b,3b\n")

#define __asm_copy_to_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_user_cont(to, from, ret, \
		COPY \
		" move.w [%1+],$acr\n" \
		"2: move.w $acr,[%0+]\n", \
		FIXUP \
		"3: jump 1b\n" \
		" addq 2,%2\n", \
		TENTRY \
		" .dword 2b,3b\n")

#define __asm_copy_to_user_2(to, from, ret) \
	__asm_copy_to_user_2x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_3(to, from, ret) \
	__asm_copy_to_user_2x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"4: move.b $acr,[%0+]\n", \
		"5: addq 1,%2\n", \
		" .dword 4b,5b\n")

#define __asm_copy_to_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_user_cont(to, from, ret, \
		COPY \
		" move.d [%1+],$acr\n" \
		"2: move.d $acr,[%0+]\n", \
		FIXUP \
		"3: jump 1b\n" \
		" addq 4,%2\n", \
		TENTRY \
		" .dword 2b,3b\n")

#define __asm_copy_to_user_4(to, from, ret) \
	__asm_copy_to_user_4x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_5(to, from, ret) \
	__asm_copy_to_user_4x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"4: move.b $acr,[%0+]\n", \
		"5: addq 1,%2\n", \
		" .dword 4b,5b\n")

#define __asm_copy_to_user_6x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_4x_cont(to, from, ret, \
		COPY \
		" move.w [%1+],$acr\n" \
		"4: move.w $acr,[%0+]\n", \
		FIXUP \
		"5: addq 2,%2\n", \
		TENTRY \
		" .dword 4b,5b\n")

#define __asm_copy_to_user_6(to, from, ret) \
	__asm_copy_to_user_6x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_7(to, from, ret) \
	__asm_copy_to_user_6x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"6: move.b $acr,[%0+]\n", \
		"7: addq 1,%2\n", \
		" .dword 6b,7b\n")

#define __asm_copy_to_user_8x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_4x_cont(to, from, ret, \
		COPY \
		" move.d [%1+],$acr\n" \
		"4: move.d $acr,[%0+]\n", \
		FIXUP \
		"5: addq 4,%2\n", \
		TENTRY \
		" .dword 4b,5b\n")

#define __asm_copy_to_user_8(to, from, ret) \
	__asm_copy_to_user_8x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_9(to, from, ret) \
	__asm_copy_to_user_8x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"6: move.b $acr,[%0+]\n", \
		"7: addq 1,%2\n", \
		" .dword 6b,7b\n")

#define __asm_copy_to_user_10x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_8x_cont(to, from, ret, \
		COPY \
		" move.w [%1+],$acr\n" \
		"6: move.w $acr,[%0+]\n", \
		FIXUP \
		"7: addq 2,%2\n", \
		TENTRY \
		" .dword 6b,7b\n")

#define __asm_copy_to_user_10(to, from, ret) \
	__asm_copy_to_user_10x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_11(to, from, ret) \
	__asm_copy_to_user_10x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"8: move.b $acr,[%0+]\n", \
		"9: addq 1,%2\n", \
		" .dword 8b,9b\n")

#define __asm_copy_to_user_12x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_8x_cont(to, from, ret, \
		COPY \
		" move.d [%1+],$acr\n" \
		"6: move.d $acr,[%0+]\n", \
		FIXUP \
		"7: addq 4,%2\n", \
		TENTRY \
		" .dword 6b,7b\n")

#define __asm_copy_to_user_12(to, from, ret) \
	__asm_copy_to_user_12x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_13(to, from, ret) \
	__asm_copy_to_user_12x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"8: move.b $acr,[%0+]\n", \
		"9: addq 1,%2\n", \
		" .dword 8b,9b\n")

#define __asm_copy_to_user_14x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_12x_cont(to, from, ret, \
		COPY \
		" move.w [%1+],$acr\n" \
		"8: move.w $acr,[%0+]\n", \
		FIXUP \
		"9: addq 2,%2\n", \
		TENTRY \
		" .dword 8b,9b\n")

#define __asm_copy_to_user_14(to, from, ret) \
	__asm_copy_to_user_14x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_15(to, from, ret) \
	__asm_copy_to_user_14x_cont(to, from, ret, \
		" move.b [%1+],$acr\n" \
		"10: move.b $acr,[%0+]\n", \
		"11: addq 1,%2\n", \
		" .dword 10b,11b\n")

#define __asm_copy_to_user_16x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_12x_cont(to, from, ret, \
		COPY \
		" move.d [%1+],$acr\n" \
		"8: move.d $acr,[%0+]\n", \
		FIXUP \
		"9: addq 4,%2\n", \
		TENTRY \
		" .dword 8b,9b\n")

#define __asm_copy_to_user_16(to, from, ret) \
	__asm_copy_to_user_16x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_20x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_16x_cont(to, from, ret, \
		COPY \
		" move.d [%1+],$acr\n" \
		"10: move.d $acr,[%0+]\n", \
		FIXUP \
		"11: addq 4,%2\n", \
		TENTRY \
		" .dword 10b,11b\n")

#define __asm_copy_to_user_20(to, from, ret) \
	__asm_copy_to_user_20x_cont(to, from, ret, "", "", "")

#define __asm_copy_to_user_24x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_to_user_20x_cont(to, from, ret, \
		COPY \
		" move.d [%1+],$acr\n" \
		"12: move.d $acr,[%0+]\n", \
		FIXUP \
		"13: addq 4,%2\n", \
		TENTRY \
		" .dword 12b,13b\n")

#define __asm_copy_to_user_24(to, from, ret) \
	__asm_copy_to_user_24x_cont(to, from, ret, "", "", "")

/* Define a few clearing asms with exception handlers. */

/* This frame-asm is like the __asm_copy_user_cont one, but has one less
   input. */
#define __asm_clear(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm__ __volatile__ ( \
		CLEAR \
		"1:\n" \
		" .section .fixup,\"ax\"\n" \
		FIXUP \
		" .previous\n" \
		" .section __ex_table,\"a\"\n" \
		TENTRY \
		" .previous" \
		: "=b" (to), "=r" (ret) \
		: "0" (to), "1" (ret) \
		: "memory")

#define __asm_clear_1(to, ret) \
	__asm_clear(to, ret, \
		"2: clear.b [%0+]\n", \
		"3: jump 1b\n" \
		" addq 1,%1\n", \
		" .dword 2b,3b\n")

#define __asm_clear_2(to, ret) \
	__asm_clear(to, ret, \
		"2: clear.w [%0+]\n", \
		"3: jump 1b\n" \
		" addq 2,%1\n", \
		" .dword 2b,3b\n")

#define __asm_clear_3(to, ret) \
	__asm_clear(to, ret, \
		"2: clear.w [%0+]\n" \
		"3: clear.b [%0+]\n", \
		"4: addq 2,%1\n" \
		"5: jump 1b\n" \
		" addq 1,%1\n", \
		" .dword 2b,4b\n" \
		" .dword 3b,5b\n")

#define __asm_clear_4x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm_clear(to, ret, \
		CLEAR \
		"2: clear.d [%0+]\n", \
		FIXUP \
		"3: jump 1b\n" \
		" addq 4,%1\n", \
		TENTRY \
		" .dword 2b,3b\n")

#define __asm_clear_4(to, ret) \
	__asm_clear_4x_cont(to, ret, "", "", "")

#define __asm_clear_8x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm_clear_4x_cont(to, ret, \
		CLEAR \
		"4: clear.d [%0+]\n", \
		FIXUP \
		"5: addq 4,%1\n", \
		TENTRY \
		" .dword 4b,5b\n")

#define __asm_clear_8(to, ret) \
	__asm_clear_8x_cont(to, ret, "", "", "")

#define __asm_clear_12x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm_clear_8x_cont(to, ret, \
		CLEAR \
		"6: clear.d [%0+]\n", \
		FIXUP \
		"7: addq 4,%1\n", \
		TENTRY \
		" .dword 6b,7b\n")

#define __asm_clear_12(to, ret) \
	__asm_clear_12x_cont(to, ret, "", "", "")

#define __asm_clear_16x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm_clear_12x_cont(to, ret, \
		CLEAR \
		"8: clear.d [%0+]\n", \
		FIXUP \
		"9: addq 4,%1\n", \
		TENTRY \
		" .dword 8b,9b\n")

#define __asm_clear_16(to, ret) \
	__asm_clear_16x_cont(to, ret, "", "", "")

#define __asm_clear_20x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm_clear_16x_cont(to, ret, \
		CLEAR \
		"10: clear.d [%0+]\n", \
		FIXUP \
		"11: addq 4,%1\n", \
		TENTRY \
		" .dword 10b,11b\n")

#define __asm_clear_20(to, ret) \
	__asm_clear_20x_cont(to, ret, "", "", "")

#define __asm_clear_24x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
	__asm_clear_20x_cont(to, ret, \
		CLEAR \
		"12: clear.d [%0+]\n", \
		FIXUP \
		"13: addq 4,%1\n", \
		TENTRY \
		" .dword 12b,13b\n")

#define __asm_clear_24(to, ret) \
	__asm_clear_24x_cont(to, ret, "", "", "")
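/*
 * Illustrative sketch, not part of the original header: like the copy
 * macros, the __asm_clear_N variants are intended to be picked for small
 * compile-time-constant sizes, with "ret" accumulating the number of
 * bytes left uncleared after a fault.  Condensed, hypothetical
 * dispatcher (the fallback name is an assumption):
 */
#if 0
static inline unsigned long
__constant_clear_user(void *to, unsigned long n)
{
	unsigned long ret = 0;

	switch (n) {
	case 1:  __asm_clear_1(to, ret);  break;
	case 4:  __asm_clear_4(to, ret);  break;
	case 8:  __asm_clear_8(to, ret);  break;
	case 24: __asm_clear_24(to, ret); break;
	default: ret = __generic_clear_user(to, n); break;
	}
	return ret;	/* bytes left uncleared on a fault */
}
#endif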
/*
 * Return the size of a string (including the ending 0)
 *
 * Return length of string in userspace including terminating 0
 * or 0 for error.  Return a value greater than N if too long.
 */
static inline long
strnlen_user(const char *s, long n)
{
	long res, tmp1;

	if (!access_ok(VERIFY_READ, s, 0))
		return 0;

	/*
	 * This code is deduced from:
	 *
	 *	tmp1 = n;
	 *	while (tmp1-- > 0 && *s++)
	 *		;
	 *
	 *	res = n - tmp1;
	 *
	 * (with tweaks).
	 */
	__asm__ __volatile__ (
		" move.d %1,$acr\n"
		" cmpq 0,$acr\n"
		"0:\n"
		" ble 1f\n"
		" subq 1,$acr\n"
		"4: test.b [%0+]\n"
		" bne 0b\n"
		" cmpq 0,$acr\n"
		"1:\n"
		" move.d %1,%0\n"
		" sub.d $acr,%0\n"
		"2:\n"
		" .section .fixup,\"ax\"\n"
		"3: jump 2b\n"
		" clear.d %0\n"
		" .previous\n"
		" .section __ex_table,\"a\"\n"
		" .dword 4b,3b\n"
		" .previous\n"
		: "=r" (res), "=r" (tmp1)
		: "0" (s), "1" (n)
		: "acr");

	return res;
}
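/*
 * Illustrative usage sketch, not part of the original header: a typical
 * caller sizes the user string first and treats 0 as a fault and any
 * value larger than the limit as "too long", per the comment above.
 * The helper function and error choices below are hypothetical.
 */
#if 0
static long example_fetch_user_string(char *buf, const char *uname, long buflen)
{
	long len = strnlen_user(uname, buflen);

	if (len == 0)		/* fault while reading userspace */
		return -EFAULT;
	if (len > buflen)	/* no terminating 0 within buflen bytes */
		return -ENAMETOOLONG;
	/* len includes the terminating 0, so this copies the whole string */
	return __do_strncpy_from_user(buf, uname, len);
}
#endif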
#endif