[PATCH v2 7/9] crypto: virtio: Add IV buffer in structure virtio_crypto_sym_request

From: Bibo Mao

Date: Thu Dec 04 2025 - 06:25:18 EST


Add an IV buffer to struct virtio_crypto_sym_request to avoid an
unnecessary per-request IV buffer allocation in the encrypt/decrypt
path. The IV buffer is cleared when the encrypt/decrypt operation
finishes.

Signed-off-by: Bibo Mao <maobibo@xxxxxxxxxxx>
---
.../virtio/virtio_crypto_skcipher_algs.c | 20 +++++++------------
1 file changed, 7 insertions(+), 13 deletions(-)

diff --git a/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c b/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c
index a7c7c726e6d9..c911b7ba8f13 100644
--- a/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c
+++ b/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c
@@ -30,9 +30,9 @@ struct virtio_crypto_sym_request {

/* Cipher or aead */
uint32_t type;
- uint8_t *iv;
/* Encryption? */
bool encrypt;
+ uint8_t iv[];
};

struct virtio_crypto_algo {
@@ -402,12 +402,7 @@ __virtio_crypto_skcipher_do_req(struct virtio_crypto_sym_request *vc_sym_req,
* Avoid to do DMA from the stack, switch to using
* dynamically-allocated for the IV
*/
- iv = kzalloc_node(ivsize, GFP_ATOMIC,
- dev_to_node(&vcrypto->vdev->dev));
- if (!iv) {
- err = -ENOMEM;
- goto free;
- }
+ iv = vc_sym_req->iv;
memcpy(iv, req->iv, ivsize);
if (!vc_sym_req->encrypt)
scatterwalk_map_and_copy(req->iv, req->src,
@@ -416,7 +411,6 @@ __virtio_crypto_skcipher_do_req(struct virtio_crypto_sym_request *vc_sym_req,

sg_init_one(&iv_sg, iv, ivsize);
sgs[num_out++] = &iv_sg;
- vc_sym_req->iv = iv;

/* Source data */
for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--)
@@ -438,12 +432,10 @@ __virtio_crypto_skcipher_do_req(struct virtio_crypto_sym_request *vc_sym_req,
virtqueue_kick(data_vq->vq);
spin_unlock_irqrestore(&data_vq->lock, flags);
if (unlikely(err < 0))
- goto free_iv;
+ goto free;

return 0;

-free_iv:
- kfree_sensitive(iv);
free:
kfree(sgs);
return err;
@@ -501,8 +493,10 @@ static int virtio_crypto_skcipher_init(struct crypto_skcipher *tfm)
{
struct virtio_crypto_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
+ int size;

- crypto_skcipher_set_reqsize(tfm, sizeof(struct virtio_crypto_sym_request));
+ size = sizeof(struct virtio_crypto_sym_request) + crypto_skcipher_ivsize(tfm);
+ crypto_skcipher_set_reqsize(tfm, size);
ctx->alg = container_of(alg, struct virtio_crypto_algo, algo.base);

return 0;
@@ -552,7 +546,7 @@ static void virtio_crypto_skcipher_finalize_req(
scatterwalk_map_and_copy(req->iv, req->dst,
req->cryptlen - ivsize,
ivsize, 0);
- kfree_sensitive(vc_sym_req->iv);
+ memzero_explicit(vc_sym_req->iv, ivsize);
virtcrypto_clear_request(&vc_sym_req->base);

crypto_finalize_skcipher_request(vc_sym_req->base.dataq->engine,
--
2.39.3