[PATCH 2/4] crypto: pass the flag CRYPTO_ALG_ALLOCATES_MEMORY

From: Mikulas Patocka
Date: Tue Jun 16 2020 - 11:02:10 EST


Pass the flag CRYPTO_ALG_ALLOCATES_MEMORY down through the crypto API:
add a helper crypto_requires_nomem(), analogous to crypto_requires_sync(),
that turns a request for !CRYPTO_ALG_ALLOCATES_MEMORY into a mask bit; OR
it into the mask that templates use when grabbing their inner algorithms;
and propagate CRYPTO_ALG_ALLOCATES_MEMORY from the inner algorithms into
each instance's cra_flags so the flag survives algorithm composition.
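
Each per-template change follows the same two-step pattern; a minimal
sketch (illustrative only, with "alg" and "inst" standing in for whatever
names the particular template uses):

	/* Honour a !CRYPTO_ALG_ALLOCATES_MEMORY request by demanding
	 * non-allocating inner algorithms when grabbing them ... */
	mask = crypto_requires_sync(algt->type, algt->mask) |
	       crypto_requires_nomem(algt->type, algt->mask);
	...
	/* ... and propagate the flag from the inner algorithm to the
	 * instance. */
	inst->alg.base.cra_flags = alg->base.cra_flags &
				   (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);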

Signed-off-by: Mikulas Patocka <mpatocka@xxxxxxxxxx>

---
crypto/adiantum.c | 3 ++-
crypto/authenc.c | 5 +++--
crypto/authencesn.c | 5 +++--
crypto/ccm.c | 7 ++++---
crypto/chacha20poly1305.c | 5 +++--
crypto/cryptd.c | 7 +++++--
crypto/ctr.c | 3 ++-
crypto/cts.c | 5 +++--
crypto/essiv.c | 5 +++--
crypto/gcm.c | 15 +++++++++------
crypto/geniv.c | 3 ++-
crypto/lrw.c | 5 +++--
crypto/rsa-pkcs1pad.c | 5 +++--
crypto/xts.c | 2 +-
include/crypto/algapi.h | 9 +++++++++
15 files changed, 55 insertions(+), 29 deletions(-)

Index: linux-2.6/crypto/authenc.c
===================================================================
--- linux-2.6.orig/crypto/authenc.c
+++ linux-2.6/crypto/authenc.c
@@ -388,7 +388,8 @@ static int crypto_authenc_create(struct
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
@@ -424,7 +425,7 @@ static int crypto_authenc_create(struct
goto err_free_inst;

inst->alg.base.cra_flags = (auth_base->cra_flags |
- enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
+ enc->base.cra_flags) & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
auth_base->cra_priority;
inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
Index: linux-2.6/crypto/authencesn.c
===================================================================
--- linux-2.6.orig/crypto/authencesn.c
+++ linux-2.6/crypto/authencesn.c
@@ -406,7 +406,8 @@ static int crypto_authenc_esn_create(str
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
@@ -438,7 +439,7 @@ static int crypto_authenc_esn_create(str
goto err_free_inst;

inst->alg.base.cra_flags = (auth_base->cra_flags |
- enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
+ enc->base.cra_flags) & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
auth_base->cra_priority;
inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
Index: linux-2.6/crypto/ccm.c
===================================================================
--- linux-2.6.orig/crypto/ccm.c
+++ linux-2.6/crypto/ccm.c
@@ -462,7 +462,8 @@ static int crypto_ccm_create_common(stru
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
if (!inst)
@@ -507,7 +508,7 @@ static int crypto_ccm_create_common(stru
mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_free_inst;

- inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = ctr->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = (mac->base.cra_priority +
ctr->base.cra_priority) / 2;
inst->alg.base.cra_blocksize = 1;
@@ -759,7 +760,7 @@ static int crypto_rfc4309_create(struct
CRYPTO_MAX_ALG_NAME)
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = 1;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
Index: linux-2.6/crypto/chacha20poly1305.c
===================================================================
--- linux-2.6.orig/crypto/chacha20poly1305.c
+++ linux-2.6/crypto/chacha20poly1305.c
@@ -573,7 +573,8 @@ static int chachapoly_create(struct cryp
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
@@ -614,7 +615,7 @@ static int chachapoly_create(struct cryp
goto err_free_inst;

inst->alg.base.cra_flags = (chacha->base.cra_flags |
- poly->base.cra_flags) & CRYPTO_ALG_ASYNC;
+ poly->base.cra_flags) & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = (chacha->base.cra_priority +
poly->base.cra_priority) / 2;
inst->alg.base.cra_blocksize = 1;
Index: linux-2.6/crypto/cryptd.c
===================================================================
--- linux-2.6.orig/crypto/cryptd.c
+++ linux-2.6/crypto/cryptd.c
@@ -396,6 +396,7 @@ static int cryptd_create_skcipher(struct
goto err_free_inst;

inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
+ (alg->base.cra_flags & CRYPTO_ALG_ALLOCATES_MEMORY) |
(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
@@ -663,7 +664,8 @@ static int cryptd_create_hash(struct cry

inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
- CRYPTO_ALG_OPTIONAL_KEY));
+ CRYPTO_ALG_OPTIONAL_KEY |
+ CRYPTO_ALG_ALLOCATES_MEMORY));

inst->alg.halg.digestsize = alg->digestsize;
inst->alg.halg.statesize = alg->statesize;
@@ -849,7 +851,8 @@ static int cryptd_create_aead(struct cry
goto err_free_inst;

inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
- (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
+ (alg->base.cra_flags & CRYPTO_ALG_INTERNAL) |
+ (alg->base.cra_flags & CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
Index: linux-2.6/crypto/ctr.c
===================================================================
--- linux-2.6.orig/crypto/ctr.c
+++ linux-2.6/crypto/ctr.c
@@ -276,6 +276,7 @@ static int crypto_rfc3686_create(struct
return -ENOMEM;

mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask) |
crypto_requires_off(algt->type, algt->mask,
CRYPTO_ALG_NEED_FALLBACK);

@@ -310,7 +311,7 @@ static int crypto_rfc3686_create(struct
inst->alg.base.cra_blocksize = 1;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);

inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
Index: linux-2.6/crypto/cts.c
===================================================================
--- linux-2.6.orig/crypto/cts.c
+++ linux-2.6/crypto/cts.c
@@ -337,7 +337,8 @@ static int crypto_cts_create(struct cryp
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
if (!inst)
@@ -364,7 +365,7 @@ static int crypto_cts_create(struct cryp
if (err)
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
Index: linux-2.6/crypto/essiv.c
===================================================================
--- linux-2.6.orig/crypto/essiv.c
+++ linux-2.6/crypto/essiv.c
@@ -468,7 +468,8 @@ static int essiv_create(struct crypto_te
return PTR_ERR(shash_name);

type = algt->type & algt->mask;
- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

switch (type) {
case CRYPTO_ALG_TYPE_SKCIPHER:
@@ -559,7 +560,7 @@ static int essiv_create(struct crypto_te
hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto out_free_hash;

- base->cra_flags = block_base->cra_flags & CRYPTO_ALG_ASYNC;
+ base->cra_flags = block_base->cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
base->cra_blocksize = block_base->cra_blocksize;
base->cra_ctxsize = sizeof(struct essiv_tfm_ctx);
base->cra_alignmask = block_base->cra_alignmask;
Index: linux-2.6/crypto/gcm.c
===================================================================
--- linux-2.6.orig/crypto/gcm.c
+++ linux-2.6/crypto/gcm.c
@@ -593,7 +593,8 @@ static int crypto_gcm_create_common(stru
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
@@ -636,7 +637,7 @@ static int crypto_gcm_create_common(stru
goto err_free_inst;

inst->alg.base.cra_flags = (ghash->base.cra_flags |
- ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
+ ctr->base.cra_flags) & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = (ghash->base.cra_priority +
ctr->base.cra_priority) / 2;
inst->alg.base.cra_blocksize = 1;
@@ -849,7 +850,8 @@ static int crypto_rfc4106_create(struct
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
if (!inst)
@@ -882,7 +884,7 @@ static int crypto_rfc4106_create(struct
CRYPTO_MAX_ALG_NAME)
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = 1;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
@@ -1071,7 +1073,8 @@ static int crypto_rfc4543_create(struct
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
@@ -1104,7 +1107,7 @@ static int crypto_rfc4543_create(struct
CRYPTO_MAX_ALG_NAME)
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = 1;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
Index: linux-2.6/crypto/geniv.c
===================================================================
--- linux-2.6.orig/crypto/geniv.c
+++ linux-2.6/crypto/geniv.c
@@ -64,6 +64,7 @@ struct aead_instance *aead_geniv_alloc(s

/* Ignore async algorithms if necessary. */
mask |= crypto_requires_sync(algt->type, algt->mask);
+ mask |= crypto_requires_nomem(algt->type, algt->mask);

err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
crypto_attr_alg_name(tb[1]), type, mask);
@@ -89,7 +90,7 @@ struct aead_instance *aead_geniv_alloc(s
CRYPTO_MAX_ALG_NAME)
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
Index: linux-2.6/crypto/lrw.c
===================================================================
--- linux-2.6.orig/crypto/lrw.c
+++ linux-2.6/crypto/lrw.c
@@ -311,7 +311,8 @@ static int create(struct crypto_template
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

cipher_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(cipher_name))
@@ -379,7 +380,7 @@ static int create(struct crypto_template
} else
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = LRW_BLOCK_SIZE;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
Index: linux-2.6/crypto/rsa-pkcs1pad.c
===================================================================
--- linux-2.6.orig/crypto/rsa-pkcs1pad.c
+++ linux-2.6/crypto/rsa-pkcs1pad.c
@@ -611,7 +611,8 @@ static int pkcs1pad_create(struct crypto
if ((algt->type ^ CRYPTO_ALG_TYPE_AKCIPHER) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
@@ -658,7 +659,7 @@ static int pkcs1pad_create(struct crypto
goto err_free_inst;
}

- inst->alg.base.cra_flags = rsa_alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = rsa_alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = rsa_alg->base.cra_priority;
inst->alg.base.cra_ctxsize = sizeof(struct pkcs1pad_ctx);

Index: linux-2.6/crypto/xts.c
===================================================================
--- linux-2.6.orig/crypto/xts.c
+++ linux-2.6/crypto/xts.c
@@ -415,7 +415,7 @@ static int create(struct crypto_template
} else
goto err_free_inst;

- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = alg->base.cra_flags & (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY);
inst->alg.base.cra_priority = alg->base.cra_priority;
inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
Index: linux-2.6/include/crypto/algapi.h
===================================================================
--- linux-2.6.orig/include/crypto/algapi.h
+++ linux-2.6/include/crypto/algapi.h
@@ -247,6 +247,15 @@ static inline int crypto_requires_sync(u
return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}

+/*
+ * Returns CRYPTO_ALG_ALLOCATES_MEMORY if type/mask requires the use of
+ * algorithms that do not allocate memory. Otherwise returns zero.
+ */
+static inline int crypto_requires_nomem(u32 type, u32 mask)
+{
+ return crypto_requires_off(type, mask, CRYPTO_ALG_ALLOCATES_MEMORY);
+}
+
noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
Index: linux-2.6/crypto/adiantum.c
===================================================================
--- linux-2.6.orig/crypto/adiantum.c
+++ linux-2.6/crypto/adiantum.c
@@ -507,7 +507,8 @@ static int adiantum_create(struct crypto
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return -EINVAL;

- mask = crypto_requires_sync(algt->type, algt->mask);
+ mask = crypto_requires_sync(algt->type, algt->mask) |
+ crypto_requires_nomem(algt->type, algt->mask);

inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
if (!inst)