|
@@ -605,6 +605,53 @@ int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
|
|
|
}
|
|
|
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
|
|
|
|
|
|
+/*
+ * crypto_inc_byte - increment a big-endian byte string by one
+ * @a: buffer holding the value (most significant byte first)
+ * @size: number of bytes in @a
+ *
+ * Walks from the least significant byte towards the front and stops
+ * as soon as an increment does not wrap to zero, i.e. when there is
+ * no further carry to propagate.
+ */
+static inline void crypto_inc_byte(u8 *a, unsigned int size)
+{
+	while (size--) {
+		if (++a[size])
+			break;
+	}
+}
|
|
|
+
|
|
|
+/*
+ * crypto_inc - increment a big-endian counter
+ * @a: buffer holding the counter (most significant byte first)
+ * @size: number of bytes in @a
+ *
+ * Increments the counter by one, propagating carries from the least
+ * significant end.  Works a 32-bit word at a time when the buffer end
+ * is suitably aligned, falling back to byte operations otherwise.
+ */
+void crypto_inc(u8 *a, unsigned int size)
+{
+	u32 c;
+
+	/*
+	 * Only take the word-at-a-time path when a + size is 4-byte
+	 * aligned: dereferencing a misaligned __be32 pointer is undefined
+	 * behaviour and faults on architectures without efficient
+	 * unaligned access.  Each subsequent word stays aligned because
+	 * the pointer only moves in 4-byte steps.
+	 */
+	if (((unsigned long)(a + size) & 3) == 0) {
+		__be32 *b = (__be32 *)(a + size);
+
+		for (; size >= 4; size -= 4) {
+			c = be32_to_cpu(*--b) + 1;
+			*b = cpu_to_be32(c);
+			if (c)
+				return;
+		}
+	}
+
+	/* Handle the unaligned case and/or the leftover head bytes. */
+	crypto_inc_byte(a, size);
+}
+EXPORT_SYMBOL_GPL(crypto_inc);
|
|
|
+
|
|
|
+/* XOR @size bytes of @b into @a, one byte at a time. */
+static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
+{
+	while (size--)
+		*a++ ^= *b++;
+}
|
|
|
+
|
|
|
+/*
+ * crypto_xor - XOR the bytes of one buffer into another
+ * @dst: destination buffer, updated in place
+ * @src: source buffer, not modified
+ * @size: number of bytes to process
+ *
+ * Processes a 32-bit word at a time when both pointers are 4-byte
+ * aligned, falling back to byte operations for unaligned buffers and
+ * for any tail bytes.
+ */
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
+{
+	/*
+	 * Guard the word loop: dereferencing a misaligned u32 pointer is
+	 * undefined behaviour and faults on strict-alignment
+	 * architectures.
+	 */
+	if ((((unsigned long)dst | (unsigned long)src) & 3) == 0) {
+		u32 *a = (u32 *)dst;
+		/* Keep @src's const qualifier instead of casting it away. */
+		const u32 *b = (const u32 *)src;
+
+		for (; size >= 4; size -= 4)
+			*a++ ^= *b++;
+
+		dst = (u8 *)a;
+		src = (const u8 *)b;
+	}
+
+	crypto_xor_byte(dst, src, size);
+}
+EXPORT_SYMBOL_GPL(crypto_xor);
|
|
|
+
|
|
|
static int __init crypto_algapi_init(void)
|
|
|
{
|
|
|
crypto_init_proc();
|