@@ -28,6 +28,8 @@
 static DEFINE_MUTEX(crypto_default_null_skcipher_lock);
 static struct crypto_blkcipher *crypto_default_null_skcipher;
 static int crypto_default_null_skcipher_refcnt;
+static struct crypto_skcipher *crypto_default_null_skcipher2;
+static int crypto_default_null_skcipher2_refcnt;
 
 static int null_compress(struct crypto_tfm *tfm, const u8 *src,
 			 unsigned int slen, u8 *dst, unsigned int *dlen)
@@ -188,6 +190,42 @@ void crypto_put_default_null_skcipher(void)
 }
 EXPORT_SYMBOL_GPL(crypto_put_default_null_skcipher);
 
+struct crypto_skcipher *crypto_get_default_null_skcipher2(void)
+{
+	struct crypto_skcipher *tfm;
+
+	mutex_lock(&crypto_default_null_skcipher_lock);
+	tfm = crypto_default_null_skcipher2;
+
+	if (!tfm) {
+		tfm = crypto_alloc_skcipher("ecb(cipher_null)",
+					    0, CRYPTO_ALG_ASYNC);
+		if (IS_ERR(tfm))
+			goto unlock;
+
+		crypto_default_null_skcipher2 = tfm;
+	}
+
+	crypto_default_null_skcipher2_refcnt++;
+
+unlock:
+	mutex_unlock(&crypto_default_null_skcipher_lock);
+
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_get_default_null_skcipher2);
+
+void crypto_put_default_null_skcipher2(void)
+{
+	mutex_lock(&crypto_default_null_skcipher_lock);
+	if (!--crypto_default_null_skcipher2_refcnt) {
+		crypto_free_skcipher(crypto_default_null_skcipher2);
+		crypto_default_null_skcipher2 = NULL;
+	}
+	mutex_unlock(&crypto_default_null_skcipher_lock);
+}
+EXPORT_SYMBOL_GPL(crypto_put_default_null_skcipher2);
+
 static int __init crypto_null_mod_init(void)
 {
 	int ret = 0;