author    | Herbert Xu <herbert@gondor.apana.org.au> | 2025-02-23 14:27:51 +0800
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2025-03-02 15:21:47 +0800
commit    | f79d2d2852facc72b91a78e5c423722c7dc53d72 (patch)
tree      | 54930887abed53572f0bf898adeedc33f56bd5e5 /crypto/skcipher.c
parent    | 006401d29a5cc3774b035e933ca6f063134b8433 (diff)
crypto: skcipher - Use restrict rather than hand-rolling accesses
Rather than accessing 'alg' directly in order to work around the
aliasing issue that leads to unnecessary reloads, use the __restrict
keyword to tell the compiler explicitly that there is no aliasing.
This generates equivalent, if not superior, code on x86 with gcc 12.
Note that in skcipher_walk_virt the assignment of 'alg' is moved after
might_sleep_if, because that function is a compiler barrier and would
force a reload anyway.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
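
[Editor's note] The sketch below is a minimal, standalone illustration of the reload problem and of what __restrict buys; it is not kernel code. The structs, fields and the req_alg() helper are hypothetical stand-ins for the request/tfm/alg chain and helpers such as crypto_skcipher_alg(), and the reload behaviour assumes a build with -fno-strict-aliasing, as the kernel uses, so the compiler cannot rule out aliasing from the pointer types alone.

/*
 * Illustrative only: made-up structs mirroring the req -> tfm -> alg
 * pointer chain.  Built with -fno-strict-aliasing (as the kernel is).
 */
struct alg  { unsigned int blocksize, ivsize, alignmask; };
struct tfm  { struct alg *alg; };
struct req  { struct tfm *tfm; };
struct walk { unsigned int blocksize, ivsize, alignmask; };

/* Stand-in for helpers like crypto_skcipher_alg(): two dependent loads. */
static inline struct alg *req_alg(struct req *r)
{
	return r->tfm->alg;
}

/*
 * Without __restrict the compiler must assume each store through 'w'
 * may have modified r->tfm, r->tfm->alg or the alg fields, so it
 * generally reloads the whole pointer chain before every read.
 */
void walk_init_plain(struct walk *w, struct req *r)
{
	w->blocksize = req_alg(r)->blocksize;
	w->ivsize    = req_alg(r)->ivsize;	/* chain reloaded */
	w->alignmask = req_alg(r)->alignmask;	/* chain reloaded again */
}

/*
 * With __restrict the stores through 'w' are known not to touch
 * anything reached through 'r', so the tfm and alg pointers can be
 * loaded once and kept in registers.
 */
void walk_init_restrict(struct walk *__restrict w, struct req *__restrict r)
{
	w->blocksize = req_alg(r)->blocksize;
	w->ivsize    = req_alg(r)->ivsize;
	w->alignmask = req_alg(r)->alignmask;
}

Compiling the two functions with something like gcc -O2 -fno-strict-aliasing and comparing the assembly should show the extra loads disappear in the __restrict variant; this is the effect the patch obtains for the real walk-setup code, where the same result was previously achieved by caching 'alg' by hand.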
Diffstat (limited to 'crypto/skcipher.c')
-rw-r--r-- | crypto/skcipher.c | 51
1 file changed, 23 insertions(+), 28 deletions(-)
diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index b0e1f3c12cde..53123d3685d5 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -293,14 +293,16 @@ static int skcipher_walk_first(struct skcipher_walk *walk)
 	return skcipher_walk_next(walk);
 }
 
-int skcipher_walk_virt(struct skcipher_walk *walk,
-		       struct skcipher_request *req, bool atomic)
+int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
+		       struct skcipher_request *__restrict req, bool atomic)
 {
-	const struct skcipher_alg *alg =
-		crypto_skcipher_alg(crypto_skcipher_reqtfm(req));
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_alg *alg;
 
 	might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
 
+	alg = crypto_skcipher_alg(tfm);
+
 	walk->total = req->cryptlen;
 	walk->nbytes = 0;
 	walk->iv = req->iv;
@@ -316,14 +318,9 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 	scatterwalk_start(&walk->in, req->src);
 	scatterwalk_start(&walk->out, req->dst);
 
-	/*
-	 * Accessing 'alg' directly generates better code than using the
-	 * crypto_skcipher_blocksize() and similar helper functions here, as it
-	 * prevents the algorithm pointer from being repeatedly reloaded.
-	 */
-	walk->blocksize = alg->base.cra_blocksize;
-	walk->ivsize = alg->co.ivsize;
-	walk->alignmask = alg->base.cra_alignmask;
+	walk->blocksize = crypto_skcipher_blocksize(tfm);
+	walk->ivsize = crypto_skcipher_ivsize(tfm);
+	walk->alignmask = crypto_skcipher_alignmask(tfm);
 
 	if (alg->co.base.cra_type != &crypto_skcipher_type)
 		walk->stride = alg->co.chunksize;
@@ -334,10 +331,11 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 }
 EXPORT_SYMBOL_GPL(skcipher_walk_virt);
 
-static int skcipher_walk_aead_common(struct skcipher_walk *walk,
-				     struct aead_request *req, bool atomic)
+static int skcipher_walk_aead_common(struct skcipher_walk *__restrict walk,
+				     struct aead_request *__restrict req,
+				     bool atomic)
 {
-	const struct aead_alg *alg = crypto_aead_alg(crypto_aead_reqtfm(req));
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
 	walk->nbytes = 0;
 	walk->iv = req->iv;
@@ -353,21 +351,17 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk,
 	scatterwalk_start_at_pos(&walk->in, req->src, req->assoclen);
 	scatterwalk_start_at_pos(&walk->out, req->dst, req->assoclen);
 
-	/*
-	 * Accessing 'alg' directly generates better code than using the
-	 * crypto_aead_blocksize() and similar helper functions here, as it
-	 * prevents the algorithm pointer from being repeatedly reloaded.
-	 */
-	walk->blocksize = alg->base.cra_blocksize;
-	walk->stride = alg->chunksize;
-	walk->ivsize = alg->ivsize;
-	walk->alignmask = alg->base.cra_alignmask;
+	walk->blocksize = crypto_aead_blocksize(tfm);
+	walk->stride = crypto_aead_chunksize(tfm);
+	walk->ivsize = crypto_aead_ivsize(tfm);
+	walk->alignmask = crypto_aead_alignmask(tfm);
 
 	return skcipher_walk_first(walk);
 }
 
-int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic)
+int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic)
 {
 	walk->total = req->cryptlen;
 
@@ -375,8 +369,9 @@ int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
 }
 EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);
 
-int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic)
+int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic)
 {
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
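
[Editor's note] On the might_sleep_if() reordering mentioned in the commit message: the short standalone sketch below (again not kernel code; barrier() here is just the usual empty asm with a "memory" clobber) shows why a compiler barrier prevents memory loads issued before it from being reused after it, which the commit message cites as the reason the 'alg' assignment in skcipher_walk_virt() was moved after might_sleep_if().

/* Hypothetical standalone demo of a compiler barrier forcing reloads. */
#define barrier() __asm__ __volatile__("" ::: "memory")

unsigned int sum_no_barrier(const unsigned int *p)
{
	/* The compiler may load *p once and reuse the value. */
	return *p + *p;
}

unsigned int sum_with_barrier(const unsigned int *p)
{
	unsigned int a = *p;	/* first load */
	barrier();		/* memory may have changed... */
	return a + *p;		/* ...so *p must be loaded again */
}

By the same reasoning, deriving 'alg' from req before might_sleep_if() buys nothing: the underlying loads would have to be redone after the barrier anyway, so the patch simply performs them once, afterwards.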