@@ -48,9 +48,10 @@ do { \
 } while (0)
 
 static inline int
-arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
+arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
 {
 	int oldval = 0, ret, tmp;
+	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);
 
 	pagefault_disable();
 
@@ -88,15 +89,17 @@ arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
 }
 
 static inline int
-futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
+futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
 			      u32 oldval, u32 newval)
 {
 	int ret = 0;
 	u32 val, tmp;
+	u32 __user *uaddr;
 
-	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+	if (!access_ok(VERIFY_WRITE, _uaddr, sizeof(u32)))
 		return -EFAULT;
 
+	uaddr = __uaccess_mask_ptr(_uaddr);
 	uaccess_enable();
 	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
 "	prfm	pstl1strm, %2\n"