@@ -14,23 +14,18 @@ extern struct task_struct *__switch_to(struct task_struct *,
 		struct task_struct *);
 #define switch_to(prev, next, last)	((last) = __switch_to((prev), (next)))
 
-struct thread_struct;
 extern struct task_struct *_switch(struct thread_struct *prev,
 				   struct thread_struct *next);
 
-extern void enable_kernel_fp(void);
-extern void enable_kernel_altivec(void);
-extern void enable_kernel_vsx(void);
+extern void switch_booke_debug_regs(struct debug_reg *new_debug);
+
 extern int emulate_altivec(struct pt_regs *);
-extern void __giveup_vsx(struct task_struct *);
-extern void giveup_vsx(struct task_struct *);
-extern void enable_kernel_spe(void);
-extern void load_up_spe(struct task_struct *);
-extern void giveup_all(struct task_struct *);
+
 extern void flush_all_to_thread(struct task_struct *);
-extern void switch_booke_debug_regs(struct debug_reg *new_debug);
+extern void giveup_all(struct task_struct *);
 
 #ifdef CONFIG_PPC_FPU
+extern void enable_kernel_fp(void);
 extern void flush_fp_to_thread(struct task_struct *);
 extern void giveup_fpu(struct task_struct *);
 extern void __giveup_fpu(struct task_struct *);
@@ -38,14 +33,12 @@ static inline void disable_kernel_fp(void)
 {
 	msr_check_and_clear(MSR_FP);
 }
-
 #else
 static inline void flush_fp_to_thread(struct task_struct *t) { }
-static inline void giveup_fpu(struct task_struct *t) { }
-static inline void __giveup_fpu(struct task_struct *t) { }
 #endif
 
 #ifdef CONFIG_ALTIVEC
+extern void enable_kernel_altivec(void);
 extern void flush_altivec_to_thread(struct task_struct *);
 extern void giveup_altivec(struct task_struct *);
 extern void __giveup_altivec(struct task_struct *);
@@ -53,25 +46,21 @@ static inline void disable_kernel_altivec(void)
 {
 	msr_check_and_clear(MSR_VEC);
 }
-#else
-static inline void flush_altivec_to_thread(struct task_struct *t) { }
-static inline void giveup_altivec(struct task_struct *t) { }
-static inline void __giveup_altivec(struct task_struct *t) { }
 #endif
 
 #ifdef CONFIG_VSX
+extern void enable_kernel_vsx(void);
 extern void flush_vsx_to_thread(struct task_struct *);
+extern void giveup_vsx(struct task_struct *);
+extern void __giveup_vsx(struct task_struct *);
 static inline void disable_kernel_vsx(void)
 {
 	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
 }
-#else
-static inline void flush_vsx_to_thread(struct task_struct *t)
-{
-}
 #endif
 
 #ifdef CONFIG_SPE
+extern void enable_kernel_spe(void);
 extern void flush_spe_to_thread(struct task_struct *);
 extern void giveup_spe(struct task_struct *);
 extern void __giveup_spe(struct task_struct *);
@@ -79,10 +68,6 @@ static inline void disable_kernel_spe(void)
 {
 	msr_check_and_clear(MSR_SPE);
 }
-#else
-static inline void flush_spe_to_thread(struct task_struct *t) { }
-static inline void giveup_spe(struct task_struct *t) { }
-static inline void __giveup_spe(struct task_struct *t) { }
 #endif
 
 static inline void clear_task_ebb(struct task_struct *t)
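
Note for reviewers, since this hunk moves the enable_kernel_*() declarations under their
CONFIG_* guards: callers must now sit under the same guard. A minimal sketch of the
bracketing pattern is below. The function name example_kernel_fp_user is hypothetical
and not part of this patch; the sketch assumes the usual rule that enable_kernel_fp()
runs with preemption disabled, and that disable_kernel_fp() expands to
msr_check_and_clear(MSR_FP) as shown in the hunk above.

#ifdef CONFIG_PPC_FPU
/* Hypothetical caller: use kernel FP inside a preempt-off region. */
static void example_kernel_fp_user(void)
{
	preempt_disable();
	enable_kernel_fp();	/* make the FPU usable from kernel context */
	/* ... FP instructions may be executed here ... */
	disable_kernel_fp();	/* msr_check_and_clear(MSR_FP) */
	preempt_enable();
}
#endif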