|
@@ -310,10 +310,15 @@ static inline unsigned long copy_from_user(void *to,
|
|
|
{
|
|
|
unsigned long over;
|
|
|
|
|
|
- if (access_ok(VERIFY_READ, from, n))
|
|
|
+ if (access_ok(VERIFY_READ, from, n)) {
|
|
|
+ if (!__builtin_constant_p(n))
|
|
|
+ check_object_size(to, n, false);
|
|
|
return __copy_tofrom_user((__force void __user *)to, from, n);
|
|
|
+ }
|
|
|
if ((unsigned long)from < TASK_SIZE) {
|
|
|
over = (unsigned long)from + n - TASK_SIZE;
|
|
|
+ if (!__builtin_constant_p(n - over))
|
|
|
+ check_object_size(to, n - over, false);
|
|
|
return __copy_tofrom_user((__force void __user *)to, from,
|
|
|
n - over) + over;
|
|
|
}
|
|
@@ -325,10 +330,15 @@ static inline unsigned long copy_to_user(void __user *to,
|
|
|
{
|
|
|
unsigned long over;
|
|
|
|
|
|
- if (access_ok(VERIFY_WRITE, to, n))
|
|
|
+ if (access_ok(VERIFY_WRITE, to, n)) {
|
|
|
+ if (!__builtin_constant_p(n))
|
|
|
+ check_object_size(from, n, true);
|
|
|
return __copy_tofrom_user(to, (__force void __user *)from, n);
|
|
|
+ }
|
|
|
if ((unsigned long)to < TASK_SIZE) {
|
|
|
over = (unsigned long)to + n - TASK_SIZE;
|
|
|
+ if (!__builtin_constant_p(n - over))
|
|
|
+ check_object_size(from, n - over, true);
|
|
|
return __copy_tofrom_user(to, (__force void __user *)from,
|
|
|
n - over) + over;
|
|
|
}
|
|
@@ -372,6 +382,10 @@ static inline unsigned long __copy_from_user_inatomic(void *to,
|
|
|
if (ret == 0)
|
|
|
return 0;
|
|
|
}
|
|
|
+
|
|
|
+ if (!__builtin_constant_p(n))
|
|
|
+ check_object_size(to, n, false);
|
|
|
+
|
|
|
return __copy_tofrom_user((__force void __user *)to, from, n);
|
|
|
}
|
|
|
|
|
@@ -398,6 +412,9 @@ static inline unsigned long __copy_to_user_inatomic(void __user *to,
|
|
|
if (ret == 0)
|
|
|
return 0;
|
|
|
}
|
|
|
+ if (!__builtin_constant_p(n))
|
|
|
+ check_object_size(from, n, true);
|
|
|
+
|
|
|
return __copy_tofrom_user(to, (__force const void __user *)from, n);
|
|
|
}
|
|
|
|