@@ -331,12 +331,12 @@ do { \
 
 unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
 {
-	stac();
+	__uaccess_begin();
 	if (movsl_is_ok(to, from, n))
 		__copy_user(to, from, n);
 	else
 		n = __copy_user_intel(to, from, n);
-	clac();
+	__uaccess_end();
 	return n;
 }
 EXPORT_SYMBOL(__copy_user_ll);
@@ -344,7 +344,7 @@ EXPORT_SYMBOL(__copy_user_ll);
 unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
 					unsigned long n)
 {
-	stac();
+	__uaccess_begin();
 #ifdef CONFIG_X86_INTEL_USERCOPY
 	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
 		n = __copy_user_intel_nocache(to, from, n);
@@ -353,7 +353,7 @@ unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *fr
 #else
 	__copy_user(to, from, n);
 #endif
-	clac();
+	__uaccess_end();
 	return n;
 }
 EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
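
For reference, a minimal sketch of the pattern these hunks converge on: the open-coded stac()/clac() pairs become the __uaccess_begin()/__uaccess_end() helpers, which bracket the region in which user-space memory may be touched. The function example_copy() below is hypothetical and only illustrates the bracketing; it is not the kernel's actual implementation.

/*
 * Minimal sketch, assuming the __uaccess_begin()/__uaccess_end() helpers
 * and a raw __copy_user() worker as seen in the hunks above.
 * example_copy() itself is illustrative only.
 */
unsigned long example_copy(void *to, const void __user *from, unsigned long n)
{
	__uaccess_begin();		/* open the user-access window */
	__copy_user(to, from, n);	/* raw copy while access is permitted */
	__uaccess_end();		/* close the window again */
	return n;
}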