@@ -29,10 +29,7 @@
 #define vector __attribute__((vector_size(16)))
 #endif
 
-#include <linux/preempt.h>
-#include <linux/export.h>
-#include <linux/sched.h>
-#include <asm/switch_to.h>
+#include "xor_vmx.h"
 
 typedef vector signed char unative_t;
 
@@ -64,16 +61,13 @@ typedef vector signed char unative_t;
 		V1##_3 = vec_xor(V1##_3, V2##_3);	\
 	} while (0)
 
-void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in)
+void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -83,23 +77,16 @@ void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
 		v1 += 4;
 		v2 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_2);
 
-void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in, unsigned long *v3_in)
+void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
 	DEFINE(v3);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -112,15 +99,11 @@ void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
 		v2 += 4;
 		v3 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_3);
 
-void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in, unsigned long *v3_in,
-		   unsigned long *v4_in)
+void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in,
+		     unsigned long *v4_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
@@ -128,9 +111,6 @@ void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
 	DEFINE(v4);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -146,15 +126,11 @@ void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
 		v3 += 4;
 		v4 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_4);
 
-void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
-		   unsigned long *v2_in, unsigned long *v3_in,
-		   unsigned long *v4_in, unsigned long *v5_in)
+void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
+		     unsigned long *v2_in, unsigned long *v3_in,
+		     unsigned long *v4_in, unsigned long *v5_in)
 {
 	DEFINE(v1);
 	DEFINE(v2);
@@ -163,9 +139,6 @@ void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
 	DEFINE(v5);
 	unsigned long lines = bytes / (sizeof(unative_t)) / 4;
 
-	preempt_disable();
-	enable_kernel_altivec();
-
 	do {
 		LOAD(v1);
 		LOAD(v2);
@@ -184,8 +157,4 @@ void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
 		v4 += 4;
 		v5 += 4;
 	} while (--lines > 0);
-
-	disable_kernel_altivec();
-	preempt_enable();
 }
-EXPORT_SYMBOL(xor_altivec_5);
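
The hunks above remove the preempt/AltiVec bookkeeping and the EXPORT_SYMBOL lines from the vector routines and rename them __xor_altivec_*, which implies that thin wrappers elsewhere (declared via the new "xor_vmx.h" include) now perform that bookkeeping before calling in. The sketch below is not part of this diff; the wrapper placement and the exact contents of xor_vmx.h are assumptions, but every call it uses appears verbatim in the removed lines:

/*
 * Hypothetical glue wrapper (not from this patch): it would live in a
 * translation unit built without AltiVec codegen, so no vector
 * instructions can be emitted before enable_kernel_altivec() runs.
 */
#include <linux/preempt.h>
#include <linux/export.h>
#include <asm/switch_to.h>
#include "xor_vmx.h"	/* assumed to declare __xor_altivec_2() etc. */

void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in)
{
	preempt_disable();
	enable_kernel_altivec();	/* allow kernel use of the VMX unit */
	__xor_altivec_2(bytes, v1_in, v2_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_2);

The xor_altivec_3/4/5 wrappers would follow the same pattern. Splitting the enable/disable logic out of the vector-heavy file is presumably what guarantees the compiler cannot schedule any AltiVec instruction ahead of enable_kernel_altivec().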