/* uaccess_64.h */
#ifndef ASM_X86__UACCESS_64_H
#define ASM_X86__UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
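
/*
 * The __copy_* inlines below skip the access_ok() range check; callers must
 * have validated the user pointer themselves (the non-underscored
 * copy_to_user()/copy_from_user() declared above perform that check).
 *
 * Hypothetical usage sketch (not part of this header) for the checked
 * variant, e.g. from a driver ioctl handler; 'struct foo_cmd' and
 * 'user_buf' are illustrative names only:
 *
 *	struct foo_cmd cmd;
 *
 *	if (copy_from_user(&cmd, user_buf, sizeof(cmd)))
 *		return -EFAULT;
 *
 * The return value is the number of bytes that could not be copied, so any
 * non-zero result is treated as a fault.
 */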

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	/*
	 * Constant sizes are open-coded below; the final __get_user_asm
	 * argument is the error value, i.e. the number of bytes reported
	 * as not copied if the access faults.
	 */
	switch (size) {
	case 1:
		__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		return ret;
	case 2:
		__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		return ret;
	case 4:
		__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		return ret;
	case 8:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			       ret, "b", "b", "iq", 1);
		return ret;
	case 2:
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 4:
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			       ret, "l", "k", "ir", 4);
		return ret;
	case 8:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		/* compiler barrier between the two user stores */
		asm("" : : : "memory");
		__put_user_asm(((u16 *)src)[4], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		/* compiler barrier between the two user stores */
		asm("" : : : "memory");
		__put_user_asm(((u64 *)src)[1], 1 + (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
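
/*
 * User-to-user copy: each constant-size case bounces the data through a
 * kernel temporary, so both the load from 'src' and the store to 'dst' go
 * through the exception tables and either side may fault safely.
 */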
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}

__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					     unsigned size);

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
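
/*
 * Cache-bypassing copies: __copy_user_nocache() uses non-temporal stores so
 * that large one-off copies do not displace useful data from the CPU caches.
 * 'zerorest' selects whether the remainder of 'dst' is zeroed when the
 * source access faults part-way through.
 */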
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					    unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						     const void __user *src,
						     unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
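
/*
 * Fault fix-up helper: after a bulk copy faults, this retries the remaining
 * bytes one at a time to establish exactly how many were left uncopied and,
 * when 'zerorest' is set, zeroes the untouched tail of the destination.
 */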
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* ASM_X86__UACCESS_64_H */