uaccess_64.h

#ifndef __X86_64_UACCESS_H
#define __X86_64_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <asm/page.h>
/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
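
/*
 * copy_{to,from,in}_user() perform the access_ok() range check
 * themselves; the inline __copy_* variants below assume the caller
 * already did.  Illustrative sketch only (the ioctl argument and
 * struct are hypothetical, not part of this header):
 *
 *	struct my_args a;
 *
 *	if (copy_from_user(&a, (void __user *)arg, sizeof(a)))
 *		return -EFAULT;
 *
 * All of these return the number of bytes that could *not* be
 * copied, so a non-zero result means the copy faulted part way.
 */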
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	/* the last __get_user_asm() argument is the bytes-left count
	   stored in ret if the access faults */
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		/* only 10 bytes are outstanding here, not 16 */
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
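
/*
 * Illustrative sketch (hypothetical caller): the double-underscore
 * variants are for paths that have already validated the range:
 *
 *	u64 val;
 *
 *	if (!access_ok(VERIFY_READ, src, sizeof(val)))
 *		return -EFAULT;
 *	if (__copy_from_user(&val, src, sizeof(val)))
 *		return -EFAULT;
 *
 * With a constant size the switch above collapses to a single
 * __get_user_asm() instead of a call to copy_user_generic().
 */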
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		/* compiler barrier between the two user stores */
		asm("" : : : "memory");
		__put_user_asm(((u16 *)src)[4], (u16 __user *)dst + 4,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(((u64 *)src)[1], (u64 __user *)dst + 1,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
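
/*
 * Illustrative sketch (hypothetical caller) of the checked/unchecked
 * split on the write side:
 *
 *	u64 val = 42;
 *
 *	if (!access_ok(VERIFY_WRITE, dst, sizeof(val)))
 *		return -EFAULT;
 *	if (__copy_to_user(dst, &val, sizeof(val)))
 *		return -EFAULT;
 *
 * Because sizeof(val) is a compile-time constant, the switch above
 * reduces this to a single 8-byte __put_user_asm().
 */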
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
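
/*
 * __copy_in_user() moves data between two userspace buffers; each
 * constant-size case above bounces the value through a kernel
 * temporary.  Sketch of a typical caller (hypothetical compat
 * handler, illustrative only):
 *
 *	if (copy_in_user(new_buf, old_buf, sizeof(u64)))
 *		return -EFAULT;
 */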
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
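
/*
 * The *_inatomic variants above and below skip might_sleep(), so
 * they may be used with pagefaults disabled (e.g. between
 * pagefault_disable() and pagefault_enable()); a faulting copy then
 * returns a short count instead of sleeping.
 */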
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
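
/*
 * The _nocache variants use non-temporal stores so a large copy does
 * not evict hot data from the CPU caches.  A non-zero zerorest asks
 * __copy_user_nocache() to zero the uncopied tail of the kernel
 * destination after a fault, matching __copy_from_user() semantics.
 */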
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* __X86_64_UACCESS_H */