@@ -6,6 +6,7 @@
  */
 
 #ifdef __KERNEL__
+#include <linux/errno.h>
 #include <linux/compiler.h>
 #include <linux/string.h>
 #include <linux/thread_info.h>
@@ -204,6 +205,14 @@ __asm__ __volatile__( \
 
 extern int __get_user_bad(void);
 
+extern void copy_from_user_overflow(void)
+#ifdef CONFIG_DEBUG_STRICT_USER_COPY_CHECKS
+	__compiletime_error("copy_from_user() buffer size is not provably correct")
+#else
+	__compiletime_warning("copy_from_user() buffer size is not provably correct")
+#endif
+;
+
 extern unsigned long __must_check ___copy_from_user(void *to,
 						    const void __user *from,
 						    unsigned long size);
@@ -212,10 +221,16 @@ extern unsigned long copy_from_user_fixup(void *to, const void __user *from,
 static inline unsigned long __must_check
 copy_from_user(void *to, const void __user *from, unsigned long size)
 {
-	unsigned long ret = ___copy_from_user(to, from, size);
-
-	if (unlikely(ret))
-		ret = copy_from_user_fixup(to, from, size);
+	unsigned long ret = (unsigned long) -EFAULT;
+	int sz = __compiletime_object_size(to);
+
+	if (likely(sz == -1 || sz >= size)) {
+		ret = ___copy_from_user(to, from, size);
+		if (unlikely(ret))
+			ret = copy_from_user_fixup(to, from, size);
+	} else {
+		copy_from_user_overflow();
+	}
 	return ret;
 }
 #define __copy_from_user copy_from_user
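
A minimal sketch of how the new check is meant to fire; this caller is not part of the patch, and demo_read_cfg() with its 16-byte buffer is purely hypothetical. When gcc can see the destination object, __compiletime_object_size() returns its size (16 here) rather than -1, so with a constant request of 32 bytes the else branch survives constant folding, the call to copy_from_user_overflow() is emitted, and the __compiletime_warning attribute on its declaration fires at build time (or __compiletime_error fails the build outright when CONFIG_DEBUG_STRICT_USER_COPY_CHECKS is set). This relies on optimization being enabled so that __builtin_object_size() can resolve, which is always the case for a normal kernel build.

/* Hypothetical caller, for illustration only -- not part of the patch. */
#include <linux/errno.h>
#include <linux/uaccess.h>

static int demo_read_cfg(const void __user *uptr)
{
	char buf[16];

	/* sz == 16, size == 32: resolves to the copy_from_user_overflow() path */
	if (copy_from_user(buf, uptr, 32))
		return -EFAULT;

	return 0;
}

With a runtime-sized destination (for example one obtained from kmalloc() with a non-constant length), __compiletime_object_size() returns -1 and the fast path is taken unchanged, so the check adds no cost to callers that cannot be proven wrong at compile time.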