@@ -35,8 +35,8 @@
 
 	.text
 ENTRY(__get_user_1)
-	GET_THREAD_INFO(%_ASM_DX)
-	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
+	mov PER_CPU_VAR(current_task), %_ASM_DX
+	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
 	jae bad_get_user
 	ASM_STAC
 1:	movzbl (%_ASM_AX),%edx
@@ -48,8 +48,8 @@ ENDPROC(__get_user_1)
 ENTRY(__get_user_2)
 	add $1,%_ASM_AX
 	jc bad_get_user
-	GET_THREAD_INFO(%_ASM_DX)
-	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
+	mov PER_CPU_VAR(current_task), %_ASM_DX
+	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
 	jae bad_get_user
 	ASM_STAC
 2:	movzwl -1(%_ASM_AX),%edx
@@ -61,8 +61,8 @@ ENDPROC(__get_user_2)
 ENTRY(__get_user_4)
 	add $3,%_ASM_AX
 	jc bad_get_user
-	GET_THREAD_INFO(%_ASM_DX)
-	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
+	mov PER_CPU_VAR(current_task), %_ASM_DX
+	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
 	jae bad_get_user
 	ASM_STAC
 3:	movl -3(%_ASM_AX),%edx
@@ -75,8 +75,8 @@ ENTRY(__get_user_8)
 #ifdef CONFIG_X86_64
 	add $7,%_ASM_AX
 	jc bad_get_user
-	GET_THREAD_INFO(%_ASM_DX)
-	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
+	mov PER_CPU_VAR(current_task), %_ASM_DX
+	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
 	jae bad_get_user
 	ASM_STAC
 4:	movq -7(%_ASM_AX),%rdx
@@ -86,8 +86,8 @@ ENTRY(__get_user_8)
 #else
 	add $7,%_ASM_AX
 	jc bad_get_user_8
-	GET_THREAD_INFO(%_ASM_DX)
-	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
+	mov PER_CPU_VAR(current_task), %_ASM_DX
+	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
 	jae bad_get_user_8
 	ASM_STAC
 4:	movl -7(%_ASM_AX),%edx