uaccess_std.c

/*
 * Standard user space access functions based on mvcp/mvcs and doing
 * interesting things in the secondary space mode.
 *
 * Copyright IBM Corp. 2006
 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *            Gerald Schaefer (gerald.schaefer@de.ibm.com)
 */

#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/uaccess.h>
#include <asm/futex.h>
#include "uaccess.h"
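
/*
 * Mnemonic macros: pick the 31-bit or 64-bit form of the add, compare,
 * load and subtract instructions so that the inline assembly below
 * assembles correctly for either CONFIG_64BIT setting.
 */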
#ifndef CONFIG_64BIT
#define AHI     "ahi"
#define ALR     "alr"
#define CLR     "clr"
#define LHI     "lhi"
#define SLR     "slr"
#else
#define AHI     "aghi"
#define ALR     "algr"
#define CLR     "clgr"
#define LHI     "lghi"
#define SLR     "slgr"
#endif
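
/*
 * Copy from user space (accessed via the secondary address space) to
 * kernel space with mvcp in 256 byte chunks.  On a fault the copy is
 * retried up to the next page boundary, the rest of the kernel buffer
 * is cleared with xc, and the number of bytes not copied is returned.
 */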
size_t copy_from_user_std(size_t size, const void __user *ptr, void *x)
{
        unsigned long tmp1, tmp2;

        tmp1 = -256UL;
        asm volatile(
                "0: mvcp 0(%0,%2),0(%1),%3\n"
                "10:jz 8f\n"
                "1:"ALR" %0,%3\n"
                " la %1,256(%1)\n"
                " la %2,256(%2)\n"
                "2: mvcp 0(%0,%2),0(%1),%3\n"
                "11:jnz 1b\n"
                " j 8f\n"
                "3: la %4,255(%1)\n"    /* %4 = ptr + 255 */
                " "LHI" %3,-4096\n"
                " nr %4,%3\n"           /* %4 = (ptr + 255) & -4096 */
                " "SLR" %4,%1\n"
                " "CLR" %0,%4\n"        /* copy crosses next page boundary? */
                " jnh 5f\n"
                "4: mvcp 0(%4,%2),0(%1),%3\n"
                "12:"SLR" %0,%4\n"
                " "ALR" %2,%4\n"
                "5:"LHI" %4,-1\n"
                " "ALR" %4,%0\n"        /* copy remaining size, subtract 1 */
                " bras %3,7f\n"         /* memset loop */
                " xc 0(1,%2),0(%2)\n"
                "6: xc 0(256,%2),0(%2)\n"
                " la %2,256(%2)\n"
                "7:"AHI" %4,-256\n"
                " jnm 6b\n"
                " ex %4,0(%3)\n"
                " j 9f\n"
                "8:"SLR" %0,%0\n"
                "9: \n"
                EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,5b)
                EX_TABLE(10b,3b) EX_TABLE(11b,3b) EX_TABLE(12b,5b)
                : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
                : : "cc", "memory");
        return size;
}
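
/*
 * Use the mvcp routine above for copies of up to 1024 bytes; larger
 * copies fall back to copy_from_user_pt.
 */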
static size_t copy_from_user_std_check(size_t size, const void __user *ptr,
                                       void *x)
{
        if (size <= 1024)
                return copy_from_user_std(size, ptr, x);
        return copy_from_user_pt(size, ptr, x);
}
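
/*
 * Copy from kernel space to user space with mvcs in 256 byte chunks.
 * On a fault the copy is retried up to the next page boundary and the
 * number of bytes not copied is returned.
 */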
size_t copy_to_user_std(size_t size, void __user *ptr, const void *x)
{
        unsigned long tmp1, tmp2;

        tmp1 = -256UL;
        asm volatile(
                "0: mvcs 0(%0,%1),0(%2),%3\n"
                "7: jz 5f\n"
                "1:"ALR" %0,%3\n"
                " la %1,256(%1)\n"
                " la %2,256(%2)\n"
                "2: mvcs 0(%0,%1),0(%2),%3\n"
                "8: jnz 1b\n"
                " j 5f\n"
                "3: la %4,255(%1)\n"    /* %4 = ptr + 255 */
                " "LHI" %3,-4096\n"
                " nr %4,%3\n"           /* %4 = (ptr + 255) & -4096 */
                " "SLR" %4,%1\n"
                " "CLR" %0,%4\n"        /* copy crosses next page boundary? */
                " jnh 6f\n"
                "4: mvcs 0(%4,%1),0(%2),%3\n"
                "9:"SLR" %0,%4\n"
                " j 6f\n"
                "5:"SLR" %0,%0\n"
                "6: \n"
                EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,6b)
                EX_TABLE(7b,3b) EX_TABLE(8b,3b) EX_TABLE(9b,6b)
                : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
                : : "cc", "memory");
        return size;
}
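
/*
 * Use the mvcs routine above for copies of up to 1024 bytes; larger
 * copies fall back to copy_to_user_pt.
 */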
static size_t copy_to_user_std_check(size_t size, void __user *ptr,
                                     const void *x)
{
        if (size <= 1024)
                return copy_to_user_std(size, ptr, x);
        return copy_to_user_pt(size, ptr, x);
}
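
/*
 * Copy within user space.  The CPU is switched to the secondary address
 * space with sacf 256 and mvc moves the data in 256 byte chunks, with
 * the final partial chunk done via ex.  After a fault the remaining
 * bytes are copied one at a time so the copy stops at the failing byte;
 * the number of bytes not copied is returned.
 */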
static size_t copy_in_user_std(size_t size, void __user *to,
                               const void __user *from)
{
        unsigned long tmp1;

        asm volatile(
                " sacf 256\n"
                " "AHI" %0,-1\n"
                " jo 5f\n"
                " bras %3,3f\n"
                "0:"AHI" %0,257\n"
                "1: mvc 0(1,%1),0(%2)\n"
                " la %1,1(%1)\n"
                " la %2,1(%2)\n"
                " "AHI" %0,-1\n"
                " jnz 1b\n"
                " j 5f\n"
                "2: mvc 0(256,%1),0(%2)\n"
                " la %1,256(%1)\n"
                " la %2,256(%2)\n"
                "3:"AHI" %0,-256\n"
                " jnm 2b\n"
                "4: ex %0,1b-0b(%3)\n"
                "5: "SLR" %0,%0\n"
                "6: sacf 0\n"
                EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
                : "+a" (size), "+a" (to), "+a" (from), "=a" (tmp1)
                : : "cc", "memory");
        return size;
}
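
/*
 * Clear user space with xc in 256 byte chunks while running in the
 * secondary address space.  A fault cuts the clear off at the next
 * page boundary.
 */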
static size_t clear_user_std(size_t size, void __user *to)
{
        unsigned long tmp1, tmp2;

        asm volatile(
                " sacf 256\n"
                " "AHI" %0,-1\n"
                " jo 5f\n"
                " bras %3,3f\n"
                " xc 0(1,%1),0(%1)\n"
                "0:"AHI" %0,257\n"
                " la %2,255(%1)\n"      /* %2 = ptr + 255 */
                " srl %2,12\n"
                " sll %2,12\n"          /* %2 = (ptr + 255) & -4096 */
                " "SLR" %2,%1\n"
                " "CLR" %0,%2\n"        /* clear crosses next page boundary? */
                " jnh 5f\n"
                " "AHI" %2,-1\n"
                "1: ex %2,0(%3)\n"
                " "AHI" %2,1\n"
                " "SLR" %0,%2\n"
                " j 5f\n"
                "2: xc 0(256,%1),0(%1)\n"
                " la %1,256(%1)\n"
                "3:"AHI" %0,-256\n"
                " jnm 2b\n"
                "4: ex %0,0(%3)\n"
                "5: "SLR" %0,%0\n"
                "6: sacf 0\n"
                EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
                : "+a" (size), "+a" (to), "=a" (tmp1), "=a" (tmp2)
                : : "cc", "memory");
        return size;
}
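
/*
 * Determine the length of a user space string with srst; register 0
 * holds the search character, the terminating NUL.  The result includes
 * the terminating NUL; a fault yields 0.
 */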
size_t strnlen_user_std(size_t size, const char __user *src)
{
        register unsigned long reg0 asm("0") = 0UL;
        unsigned long tmp1, tmp2;

        asm volatile(
                " la %2,0(%1)\n"
                " la %3,0(%0,%1)\n"
                " "SLR" %0,%0\n"
                " sacf 256\n"
                "0: srst %3,%2\n"
                " jo 0b\n"
                " la %0,1(%3)\n"        /* strnlen_user result includes \0 */
                " "SLR" %0,%1\n"
                "1: sacf 0\n"
                EX_TABLE(0b,1b)
                : "+a" (size), "+a" (src), "=a" (tmp1), "=a" (tmp2)
                : "d" (reg0) : "cc", "memory");
        return size;
}
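
/*
 * Copy a NUL terminated string from user space.  srst first locates the
 * terminating NUL (or the end of the size limit), then the string is
 * copied with mvcp in 256 byte chunks.  Returns the string length
 * without the terminating NUL, or -EFAULT on a fault.
 */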
size_t strncpy_from_user_std(size_t size, const char __user *src, char *dst)
{
        register unsigned long reg0 asm("0") = 0UL;
        unsigned long tmp1, tmp2;

        asm volatile(
                " la %3,0(%1)\n"
                " la %4,0(%0,%1)\n"
                " sacf 256\n"
                "0: srst %4,%3\n"
                " jo 0b\n"
                " sacf 0\n"
                " la %0,0(%4)\n"
                " jh 1f\n"              /* found \0 in string ? */
                " "AHI" %4,1\n"         /* include \0 in copy */
                "1:"SLR" %0,%1\n"       /* %0 = return length (without \0) */
                " "SLR" %4,%1\n"        /* %4 = copy length (including \0) */
                "2: mvcp 0(%4,%2),0(%1),%5\n"
                " jz 9f\n"
                "3:"AHI" %4,-256\n"
                " la %1,256(%1)\n"
                " la %2,256(%2)\n"
                "4: mvcp 0(%4,%2),0(%1),%5\n"
                " jnz 3b\n"
                " j 9f\n"
                "7: sacf 0\n"
                "8:"LHI" %0,%6\n"
                "9:\n"
                EX_TABLE(0b,7b) EX_TABLE(2b,8b) EX_TABLE(4b,8b)
                : "+a" (size), "+a" (src), "+d" (dst), "=a" (tmp1), "=a" (tmp2)
                : "d" (reg0), "K" (-EFAULT) : "cc", "memory");
        return size;
}
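
/*
 * Helper for the futex operations: load the old value of the futex
 * word, apply 'insn' to compute the new value and store it with compare
 * and swap, looping until the swap succeeds.  The sequence runs in the
 * secondary address space; a fault leaves ret at -EFAULT.
 */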
#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg)     \
        asm volatile(                                                   \
                " sacf 256\n"                                           \
                "0: l %1,0(%6)\n"                                       \
                "1:"insn                                                \
                "2: cs %1,%2,0(%6)\n"                                   \
                "3: jl 1b\n"                                            \
                " lhi %0,0\n"                                           \
                "4: sacf 0\n"                                           \
                EX_TABLE(0b,4b) EX_TABLE(2b,4b) EX_TABLE(3b,4b)        \
                : "=d" (ret), "=&d" (oldval), "=&d" (newval),           \
                  "=m" (*uaddr)                                         \
                : "0" (-EFAULT), "d" (oparg), "a" (uaddr),              \
                  "m" (*uaddr) : "cc");
int futex_atomic_op_std(int op, u32 __user *uaddr, int oparg, int *old)
{
        int oldval = 0, newval, ret;

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("lr %2,%5\n",
                                  ret, oldval, newval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("lr %2,%1\nar %2,%5\n",
                                  ret, oldval, newval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("lr %2,%1\nor %2,%5\n",
                                  ret, oldval, newval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("lr %2,%1\nnr %2,%5\n",
                                  ret, oldval, newval, uaddr, oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("lr %2,%1\nxr %2,%5\n",
                                  ret, oldval, newval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }
        *old = oldval;
        return ret;
}
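
/*
 * Single compare and swap on the futex word in the secondary address
 * space: *uval receives the value found at uaddr, the return value is
 * 0 on success or -EFAULT if the access faults.
 */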
int futex_atomic_cmpxchg_std(u32 *uval, u32 __user *uaddr,
                             u32 oldval, u32 newval)
{
        int ret;

        asm volatile(
                " sacf 256\n"
                "0: cs %1,%4,0(%5)\n"
                "1: la %0,0\n"
                "2: sacf 0\n"
                EX_TABLE(0b,2b) EX_TABLE(1b,2b)
                : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
                : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
                : "cc", "memory" );
        *uval = oldval;
        return ret;
}
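
/*
 * Operation vector that plugs the mvcp/mvcs based routines above into
 * the common s390 uaccess interface.
 */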
struct uaccess_ops uaccess_std = {
        .copy_from_user = copy_from_user_std_check,
        .copy_from_user_small = copy_from_user_std,
        .copy_to_user = copy_to_user_std_check,
        .copy_to_user_small = copy_to_user_std,
        .copy_in_user = copy_in_user_std,
        .clear_user = clear_user_std,
        .strnlen_user = strnlen_user_std,
        .strncpy_from_user = strncpy_from_user_std,
        .futex_atomic_op = futex_atomic_op_std,
        .futex_atomic_cmpxchg = futex_atomic_cmpxchg_std,
};