@@ -258,6 +258,7 @@ do { \
 	__chk_user_ptr(ptr);					\
 	if (!is_kernel_addr((unsigned long)__gu_addr))		\
 		might_fault();					\
+	barrier_nospec();					\
 	__get_user_size(__gu_val, __gu_addr, (size), __gu_err);	\
 	(x) = (__typeof__(*(ptr)))__gu_val;			\
 	__gu_err;						\
@@ -269,8 +270,10 @@ do { \
 	unsigned long __gu_val = 0;				\
 	const __typeof__(*(ptr)) __user *__gu_addr = (ptr);	\
 	might_fault();						\
-	if (access_ok(VERIFY_READ, __gu_addr, (size)))		\
+	if (access_ok(VERIFY_READ, __gu_addr, (size))) {	\
+		barrier_nospec();				\
 		__get_user_size(__gu_val, __gu_addr, (size), __gu_err);\
+	}							\
 	(x) = (__force __typeof__(*(ptr)))__gu_val;		\
 	__gu_err;						\
 })
@@ -281,6 +284,7 @@ do { \
 	unsigned long __gu_val;					\
 	const __typeof__(*(ptr)) __user *__gu_addr = (ptr);	\
 	__chk_user_ptr(ptr);					\
+	barrier_nospec();					\
 	__get_user_size(__gu_val, __gu_addr, (size), __gu_err);	\
 	(x) = (__force __typeof__(*(ptr)))__gu_val;		\
 	__gu_err;						\
@@ -308,15 +312,19 @@ static inline unsigned long raw_copy_from_user(void *to,
 
 		switch (n) {
 		case 1:
+			barrier_nospec();
 			__get_user_size(*(u8 *)to, from, 1, ret);
 			break;
 		case 2:
+			barrier_nospec();
 			__get_user_size(*(u16 *)to, from, 2, ret);
 			break;
 		case 4:
+			barrier_nospec();
 			__get_user_size(*(u32 *)to, from, 4, ret);
 			break;
 		case 8:
+			barrier_nospec();
 			__get_user_size(*(u64 *)to, from, 8, ret);
 			break;
 		}
@@ -324,6 +332,7 @@ static inline unsigned long raw_copy_from_user(void *to,
 		return 0;
 	}
 
+	barrier_nospec();
 	return __copy_tofrom_user((__force void __user *)to, from, n);
 }
 
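Every hunk above applies the same ordering: the speculation barrier sits after the access check (access_ok(), or the constant-size check in raw_copy_from_user()) and before the load that dereferences the user-supplied pointer. The sketch below is a minimal userspace illustration of that ordering only; barrier_nospec_demo() and the table[] array are stand-ins invented for this example (an lfence on x86-64, otherwise just a compiler barrier), not the kernel's arch-specific, runtime-patched barrier_nospec().

/*
 * Illustration of the check -> barrier -> access pattern the patch
 * enforces. Hypothetical names; not kernel code.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static uint8_t table[16];

/* Stand-in for the kernel's barrier_nospec(). */
static inline void barrier_nospec_demo(void)
{
#if defined(__x86_64__)
	__asm__ __volatile__("lfence" ::: "memory");
#else
	__asm__ __volatile__("" ::: "memory");	/* compiler barrier only */
#endif
}

/*
 * Read table[idx] only after the bounds check, with a barrier so the CPU
 * cannot speculatively run ahead using an out-of-bounds idx while the
 * branch is still unresolved (the "access_ok" / "__get_user_size" shape).
 */
static int read_checked(size_t idx, uint8_t *out)
{
	if (idx >= sizeof(table))
		return -1;		/* check */
	barrier_nospec_demo();		/* barrier */
	*out = table[idx];		/* access */
	return 0;
}

int main(void)
{
	uint8_t v;

	if (read_checked(3, &v) == 0)
		printf("table[3] = %u\n", v);
	return 0;
}

Note how the second hunk mirrors this shape: the barrier is placed inside the access_ok() branch, so the cost is only paid when the access is actually going to be performed.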