uaccess_64.h

#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
_copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
_copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
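
/*
 * copy_from_user() adds a compile-time bounds check on top of
 * _copy_from_user(): when the compiler can determine the size of the
 * destination object, a copy that would overflow it is skipped (and
 * reported via WARN() when CONFIG_DEBUG_VM is set).
 */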
static inline unsigned long __must_check copy_from_user(void *to,
					  const void __user *from,
					  unsigned long n)
{
	int sz = __compiletime_object_size(to);

	might_fault();
	if (likely(sz == -1 || sz >= n))
		n = _copy_from_user(to, from, n);
#ifdef CONFIG_DEBUG_VM
	else
		WARN(1, "Buffer overflow detected!\n");
#endif
	return n;
}
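
/*
 * Note: unlike copy_from_user() above, copy_to_user() does no
 * compile-time object-size check; it only annotates the possible fault
 * and defers to the out-of-line _copy_to_user().
 */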
static __always_inline __must_check
int copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();

	return _copy_to_user(dst, src, size);
}
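
/*
 * __copy_from_user() skips the access_ok() check.  For constant sizes of
 * 1, 2, 4, 8, 10 and 16 bytes the copy is done with inline
 * __get_user_asm() moves (the 10- and 16-byte cases as an 8-byte move
 * plus a 2- or 8-byte tail); everything else falls back to
 * copy_user_generic().
 */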
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
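
/*
 * __copy_to_user() mirrors __copy_from_user(): constant sizes use inline
 * __put_user_asm() moves, with a "memory" clobber between the two halves
 * of the 10- and 16-byte cases; other sizes go through
 * copy_user_generic().
 */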
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
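
/*
 * __copy_in_user() copies between two userspace buffers by bouncing
 * small constant-size copies through a kernel temporary (get then put),
 * and by handing anything else to copy_user_generic().
 */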
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
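
/*
 * The *_inatomic() variants omit the might_fault() annotation, so they
 * are presumably meant for callers that have already pinned the user
 * pages or otherwise cannot sleep; they go straight to
 * copy_user_generic().
 */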
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return copy_user_generic(dst, (__force const void *)src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
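
/*
 * The _nocache() helpers use __copy_user_nocache(), implemented
 * elsewhere (likely with non-temporal stores to avoid polluting the
 * cache with data that will not be reused).  The zerorest argument
 * selects whether the remainder of the destination is zeroed when the
 * copy faults partway through.
 */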
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
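
/*
 * copy_user_handle_tail() is the out-of-line fixup used when a user copy
 * faults partway through: it copies what it still can, optionally zeroes
 * what it could not (per zerorest), and returns the number of bytes left
 * uncopied.
 */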
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */