/*
 *  arch/s390/lib/uaccess_std.c
 *
 *  Standard user space access functions based on mvcp/mvcs and doing
 *  interesting things in the secondary space mode.
 *
 *    Copyright (C) IBM Corp. 2006
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *		 Gerald Schaefer (gerald.schaefer@de.ibm.com)
 */
  11. #include <linux/errno.h>
  12. #include <linux/mm.h>
  13. #include <linux/uaccess.h>
  14. #include <asm/futex.h>
  15. #include "uaccess.h"
  16. #ifndef __s390x__
  17. #define AHI "ahi"
  18. #define ALR "alr"
  19. #define CLR "clr"
  20. #define LHI "lhi"
  21. #define SLR "slr"
  22. #else
  23. #define AHI "aghi"
  24. #define ALR "algr"
  25. #define CLR "clgr"
  26. #define LHI "lghi"
  27. #define SLR "slgr"
  28. #endif
  29. size_t copy_from_user_std(size_t size, const void __user *ptr, void *x)
  30. {
  31. unsigned long tmp1, tmp2;
  32. tmp1 = -256UL;
  33. asm volatile(
  34. "0: mvcp 0(%0,%2),0(%1),%3\n"
  35. " jz 8f\n"
  36. "1:"ALR" %0,%3\n"
  37. " la %1,256(%1)\n"
  38. " la %2,256(%2)\n"
  39. "2: mvcp 0(%0,%2),0(%1),%3\n"
  40. " jnz 1b\n"
  41. " j 8f\n"
  42. "3: la %4,255(%1)\n" /* %4 = ptr + 255 */
  43. " "LHI" %3,-4096\n"
  44. " nr %4,%3\n" /* %4 = (ptr + 255) & -4096 */
  45. " "SLR" %4,%1\n"
  46. " "CLR" %0,%4\n" /* copy crosses next page boundary? */
  47. " jnh 5f\n"
  48. "4: mvcp 0(%4,%2),0(%1),%3\n"
  49. " "SLR" %0,%4\n"
  50. " "ALR" %2,%4\n"
  51. "5:"LHI" %4,-1\n"
  52. " "ALR" %4,%0\n" /* copy remaining size, subtract 1 */
  53. " bras %3,7f\n" /* memset loop */
  54. " xc 0(1,%2),0(%2)\n"
  55. "6: xc 0(256,%2),0(%2)\n"
  56. " la %2,256(%2)\n"
  57. "7:"AHI" %4,-256\n"
  58. " jnm 6b\n"
  59. " ex %4,0(%3)\n"
  60. " j 9f\n"
  61. "8:"SLR" %0,%0\n"
  62. "9: \n"
  63. EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,5b)
  64. : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
  65. : : "cc", "memory");
  66. return size;
  67. }
  68. static size_t copy_from_user_std_check(size_t size, const void __user *ptr,
  69. void *x)
  70. {
  71. if (size <= 1024)
  72. return copy_from_user_std(size, ptr, x);
  73. return copy_from_user_pt(size, ptr, x);
  74. }
  75. size_t copy_to_user_std(size_t size, void __user *ptr, const void *x)
  76. {
  77. unsigned long tmp1, tmp2;
  78. tmp1 = -256UL;
  79. asm volatile(
  80. "0: mvcs 0(%0,%1),0(%2),%3\n"
  81. " jz 5f\n"
  82. "1:"ALR" %0,%3\n"
  83. " la %1,256(%1)\n"
  84. " la %2,256(%2)\n"
  85. "2: mvcs 0(%0,%1),0(%2),%3\n"
  86. " jnz 1b\n"
  87. " j 5f\n"
  88. "3: la %4,255(%1)\n" /* %4 = ptr + 255 */
  89. " "LHI" %3,-4096\n"
  90. " nr %4,%3\n" /* %4 = (ptr + 255) & -4096 */
  91. " "SLR" %4,%1\n"
  92. " "CLR" %0,%4\n" /* copy crosses next page boundary? */
  93. " jnh 6f\n"
  94. "4: mvcs 0(%4,%1),0(%2),%3\n"
  95. " "SLR" %0,%4\n"
  96. " j 6f\n"
  97. "5:"SLR" %0,%0\n"
  98. "6: \n"
  99. EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,6b)
  100. : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
  101. : : "cc", "memory");
  102. return size;
  103. }
  104. static size_t copy_to_user_std_check(size_t size, void __user *ptr,
  105. const void *x)
  106. {
  107. if (size <= 1024)
  108. return copy_to_user_std(size, ptr, x);
  109. return copy_to_user_pt(size, ptr, x);
  110. }
  111. static size_t copy_in_user_std(size_t size, void __user *to,
  112. const void __user *from)
  113. {
  114. unsigned long tmp1;
  115. asm volatile(
  116. " "AHI" %0,-1\n"
  117. " jo 5f\n"
  118. " sacf 256\n"
  119. " bras %3,3f\n"
  120. "0:"AHI" %0,257\n"
  121. "1: mvc 0(1,%1),0(%2)\n"
  122. " la %1,1(%1)\n"
  123. " la %2,1(%2)\n"
  124. " "AHI" %0,-1\n"
  125. " jnz 1b\n"
  126. " j 5f\n"
  127. "2: mvc 0(256,%1),0(%2)\n"
  128. " la %1,256(%1)\n"
  129. " la %2,256(%2)\n"
  130. "3:"AHI" %0,-256\n"
  131. " jnm 2b\n"
  132. "4: ex %0,1b-0b(%3)\n"
  133. " sacf 0\n"
  134. "5: "SLR" %0,%0\n"
  135. "6:\n"
  136. EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
  137. : "+a" (size), "+a" (to), "+a" (from), "=a" (tmp1)
  138. : : "cc", "memory");
  139. return size;
  140. }
  141. static size_t clear_user_std(size_t size, void __user *to)
  142. {
  143. unsigned long tmp1, tmp2;
  144. asm volatile(
  145. " "AHI" %0,-1\n"
  146. " jo 5f\n"
  147. " sacf 256\n"
  148. " bras %3,3f\n"
  149. " xc 0(1,%1),0(%1)\n"
  150. "0:"AHI" %0,257\n"
  151. " la %2,255(%1)\n" /* %2 = ptr + 255 */
  152. " srl %2,12\n"
  153. " sll %2,12\n" /* %2 = (ptr + 255) & -4096 */
  154. " "SLR" %2,%1\n"
  155. " "CLR" %0,%2\n" /* clear crosses next page boundary? */
  156. " jnh 5f\n"
  157. " "AHI" %2,-1\n"
  158. "1: ex %2,0(%3)\n"
  159. " "AHI" %2,1\n"
  160. " "SLR" %0,%2\n"
  161. " j 5f\n"
  162. "2: xc 0(256,%1),0(%1)\n"
  163. " la %1,256(%1)\n"
  164. "3:"AHI" %0,-256\n"
  165. " jnm 2b\n"
  166. "4: ex %0,0(%3)\n"
  167. " sacf 0\n"
  168. "5: "SLR" %0,%0\n"
  169. "6:\n"
  170. EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
  171. : "+a" (size), "+a" (to), "=a" (tmp1), "=a" (tmp2)
  172. : : "cc", "memory");
  173. return size;
  174. }
  175. size_t strnlen_user_std(size_t size, const char __user *src)
  176. {
  177. register unsigned long reg0 asm("0") = 0UL;
  178. unsigned long tmp1, tmp2;
  179. asm volatile(
  180. " la %2,0(%1)\n"
  181. " la %3,0(%0,%1)\n"
  182. " "SLR" %0,%0\n"
  183. " sacf 256\n"
  184. "0: srst %3,%2\n"
  185. " jo 0b\n"
  186. " la %0,1(%3)\n" /* strnlen_user results includes \0 */
  187. " "SLR" %0,%1\n"
  188. "1: sacf 0\n"
  189. EX_TABLE(0b,1b)
  190. : "+a" (size), "+a" (src), "=a" (tmp1), "=a" (tmp2)
  191. : "d" (reg0) : "cc", "memory");
  192. return size;
  193. }
  194. size_t strncpy_from_user_std(size_t size, const char __user *src, char *dst)
  195. {
  196. register unsigned long reg0 asm("0") = 0UL;
  197. unsigned long tmp1, tmp2;
  198. asm volatile(
  199. " la %3,0(%1)\n"
  200. " la %4,0(%0,%1)\n"
  201. " sacf 256\n"
  202. "0: srst %4,%3\n"
  203. " jo 0b\n"
  204. " sacf 0\n"
  205. " la %0,0(%4)\n"
  206. " jh 1f\n" /* found \0 in string ? */
  207. " "AHI" %4,1\n" /* include \0 in copy */
  208. "1:"SLR" %0,%1\n" /* %0 = return length (without \0) */
  209. " "SLR" %4,%1\n" /* %4 = copy length (including \0) */
  210. "2: mvcp 0(%4,%2),0(%1),%5\n"
  211. " jz 9f\n"
  212. "3:"AHI" %4,-256\n"
  213. " la %1,256(%1)\n"
  214. " la %2,256(%2)\n"
  215. "4: mvcp 0(%4,%2),0(%1),%5\n"
  216. " jnz 3b\n"
  217. " j 9f\n"
  218. "7: sacf 0\n"
  219. "8:"LHI" %0,%6\n"
  220. "9:\n"
  221. EX_TABLE(0b,7b) EX_TABLE(2b,8b) EX_TABLE(4b,8b)
  222. : "+a" (size), "+a" (src), "+d" (dst), "=a" (tmp1), "=a" (tmp2)
  223. : "d" (reg0), "K" (-EFAULT) : "cc", "memory");
  224. return size;
  225. }
  226. #define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
  227. asm volatile( \
  228. " sacf 256\n" \
  229. "0: l %1,0(%6)\n" \
  230. "1:"insn \
  231. "2: cs %1,%2,0(%6)\n" \
  232. "3: jl 1b\n" \
  233. " lhi %0,0\n" \
  234. "4: sacf 0\n" \
  235. EX_TABLE(0b,4b) EX_TABLE(2b,4b) EX_TABLE(3b,4b) \
  236. : "=d" (ret), "=&d" (oldval), "=&d" (newval), \
  237. "=m" (*uaddr) \
  238. : "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
  239. "m" (*uaddr) : "cc");
  240. int futex_atomic_op_std(int op, int __user *uaddr, int oparg, int *old)
  241. {
  242. int oldval = 0, newval, ret;
  243. switch (op) {
  244. case FUTEX_OP_SET:
  245. __futex_atomic_op("lr %2,%5\n",
  246. ret, oldval, newval, uaddr, oparg);
  247. break;
  248. case FUTEX_OP_ADD:
  249. __futex_atomic_op("lr %2,%1\nar %2,%5\n",
  250. ret, oldval, newval, uaddr, oparg);
  251. break;
  252. case FUTEX_OP_OR:
  253. __futex_atomic_op("lr %2,%1\nor %2,%5\n",
  254. ret, oldval, newval, uaddr, oparg);
  255. break;
  256. case FUTEX_OP_ANDN:
  257. __futex_atomic_op("lr %2,%1\nnr %2,%5\n",
  258. ret, oldval, newval, uaddr, oparg);
  259. break;
  260. case FUTEX_OP_XOR:
  261. __futex_atomic_op("lr %2,%1\nxr %2,%5\n",
  262. ret, oldval, newval, uaddr, oparg);
  263. break;
  264. default:
  265. ret = -ENOSYS;
  266. }
  267. *old = oldval;
  268. return ret;
  269. }
  270. int futex_atomic_cmpxchg_std(int __user *uaddr, int oldval, int newval)
  271. {
  272. int ret;
  273. asm volatile(
  274. " sacf 256\n"
  275. "0: cs %1,%4,0(%5)\n"
  276. "1: lr %0,%1\n"
  277. "2: sacf 0\n"
  278. EX_TABLE(0b,2b) EX_TABLE(1b,2b)
  279. : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
  280. : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
  281. : "cc", "memory" );
  282. return ret;
  283. }
  284. struct uaccess_ops uaccess_std = {
  285. .copy_from_user = copy_from_user_std_check,
  286. .copy_from_user_small = copy_from_user_std,
  287. .copy_to_user = copy_to_user_std_check,
  288. .copy_to_user_small = copy_to_user_std,
  289. .copy_in_user = copy_in_user_std,
  290. .clear_user = clear_user_std,
  291. .strnlen_user = strnlen_user_std,
  292. .strncpy_from_user = strncpy_from_user_std,
  293. .futex_atomic_op = futex_atomic_op_std,
  294. .futex_atomic_cmpxchg = futex_atomic_cmpxchg_std,
  295. };