@@ -73,32 +73,29 @@ bool irq_fpu_usable(void)
 }
 EXPORT_SYMBOL(irq_fpu_usable);
 
-void kernel_fpu_begin(void)
+void __kernel_fpu_begin(void)
 {
 	struct task_struct *me = current;
 
-	WARN_ON_ONCE(!irq_fpu_usable());
-	preempt_disable();
 	if (__thread_has_fpu(me)) {
 		__save_init_fpu(me);
 		__thread_clear_has_fpu(me);
-		/* We do 'stts()' in kernel_fpu_end() */
+		/* We do 'stts()' in __kernel_fpu_end() */
 	} else if (!use_eager_fpu()) {
 		this_cpu_write(fpu_owner_task, NULL);
 		clts();
 	}
 }
-EXPORT_SYMBOL(kernel_fpu_begin);
+EXPORT_SYMBOL(__kernel_fpu_begin);
 
-void kernel_fpu_end(void)
+void __kernel_fpu_end(void)
 {
 	if (use_eager_fpu())
 		math_state_restore();
 	else
 		stts();
-	preempt_enable();
 }
-EXPORT_SYMBOL(kernel_fpu_end);
+EXPORT_SYMBOL(__kernel_fpu_end);
 
 void unlazy_fpu(struct task_struct *tsk)
 {