crypto: skcipher - Use restrict rather than hand-rolling accesses

Rather than accessing 'alg' directly to work around the aliasing
issue that causes unnecessary reloads, use the __restrict keyword
to tell the compiler explicitly that there is no aliasing.

This generates equivalent, if not superior, code on x86 with gcc 12.
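
As a rough illustration (not from the patch: the structures below are
hypothetical stand-ins, and the kernel's -fno-strict-aliasing build is
assumed, so without restrict the compiler must treat any store as
potentially aliasing the algorithm pointer):

  /* Hypothetical stand-ins for the crypto structures. */
  struct alg { unsigned int blocksize, ivsize, alignmask; };
  struct req { const struct alg *alg; };
  struct walk { unsigned int blocksize, ivsize, alignmask; };

  /*
   * Without restrict, each store through 'w' may alias 'r' or
   * '*r->alg', so 'r->alg' is reloaded before every access.
   */
  void setup(struct walk *w, const struct req *r)
  {
          w->blocksize = r->alg->blocksize;
          w->ivsize = r->alg->ivsize;        /* r->alg reloaded */
          w->alignmask = r->alg->alignmask;  /* reloaded again */
  }

  /*
   * With restrict (the C99 spelling of __restrict), 'w' and 'r' are
   * promised not to alias, so 'r->alg' is loaded once and kept in a
   * register across the stores.
   */
  void setup_restrict(struct walk *restrict w,
                      const struct req *restrict r)
  {
          w->blocksize = r->alg->blocksize;
          w->ivsize = r->alg->ivsize;
          w->alignmask = r->alg->alignmask;
  }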

Note that in skcipher_walk_virt the alg assignment is moved after
might_sleep_if because that function is a compiler barrier and would
otherwise force a reload.
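
The barrier effect can be sketched the same way, with an explicit
memory clobber standing in for might_sleep_if (hypothetical stand-ins
again, independent of the sketch above):

  struct alg { unsigned int blocksize, ivsize; };
  struct req { const struct alg *alg; };

  /* Compiler barrier: all cached memory values are invalidated. */
  #define barrier() asm volatile("" ::: "memory")

  unsigned int load_before(const struct req *restrict r)
  {
          unsigned int bs = r->alg->blocksize; /* loads r->alg */

          barrier();
          return bs + r->alg->ivsize;  /* must reload r->alg */
  }

  unsigned int load_after(const struct req *restrict r)
  {
          barrier();
          /* Both accesses sit after the barrier, so a single load
           * of r->alg serves them both. */
          return r->alg->blocksize + r->alg->ivsize;
  }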

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

diff --git a/crypto/skcipher.c b/crypto/skcipher.c
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c

@@ -293,14 +293,16 @@ static int skcipher_walk_first(struct skcipher_walk *walk)
 	return skcipher_walk_next(walk);
 }
 
-int skcipher_walk_virt(struct skcipher_walk *walk,
-		       struct skcipher_request *req, bool atomic)
+int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
+		       struct skcipher_request *__restrict req, bool atomic)
 {
-	const struct skcipher_alg *alg =
-		crypto_skcipher_alg(crypto_skcipher_reqtfm(req));
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_alg *alg;
 
 	might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
 
+	alg = crypto_skcipher_alg(tfm);
+
 	walk->total = req->cryptlen;
 	walk->nbytes = 0;
 	walk->iv = req->iv;
@@ -316,14 +318,9 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 	scatterwalk_start(&walk->in, req->src);
 	scatterwalk_start(&walk->out, req->dst);
 
-	/*
-	 * Accessing 'alg' directly generates better code than using the
-	 * crypto_skcipher_blocksize() and similar helper functions here, as it
-	 * prevents the algorithm pointer from being repeatedly reloaded.
-	 */
-	walk->blocksize = alg->base.cra_blocksize;
-	walk->ivsize = alg->co.ivsize;
-	walk->alignmask = alg->base.cra_alignmask;
+	walk->blocksize = crypto_skcipher_blocksize(tfm);
+	walk->ivsize = crypto_skcipher_ivsize(tfm);
+	walk->alignmask = crypto_skcipher_alignmask(tfm);
 
 	if (alg->co.base.cra_type != &crypto_skcipher_type)
 		walk->stride = alg->co.chunksize;
@@ -334,10 +331,11 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 }
 EXPORT_SYMBOL_GPL(skcipher_walk_virt);
 
-static int skcipher_walk_aead_common(struct skcipher_walk *walk,
-				     struct aead_request *req, bool atomic)
+static int skcipher_walk_aead_common(struct skcipher_walk *__restrict walk,
+				     struct aead_request *__restrict req,
+				     bool atomic)
 {
-	const struct aead_alg *alg = crypto_aead_alg(crypto_aead_reqtfm(req));
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
 	walk->nbytes = 0;
 	walk->iv = req->iv;
@@ -353,21 +351,17 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk,
 	scatterwalk_start_at_pos(&walk->in, req->src, req->assoclen);
 	scatterwalk_start_at_pos(&walk->out, req->dst, req->assoclen);
 
-	/*
-	 * Accessing 'alg' directly generates better code than using the
-	 * crypto_aead_blocksize() and similar helper functions here, as it
-	 * prevents the algorithm pointer from being repeatedly reloaded.
-	 */
-	walk->blocksize = alg->base.cra_blocksize;
-	walk->stride = alg->chunksize;
-	walk->ivsize = alg->ivsize;
-	walk->alignmask = alg->base.cra_alignmask;
+	walk->blocksize = crypto_aead_blocksize(tfm);
+	walk->stride = crypto_aead_chunksize(tfm);
+	walk->ivsize = crypto_aead_ivsize(tfm);
+	walk->alignmask = crypto_aead_alignmask(tfm);
 
 	return skcipher_walk_first(walk);
 }
 
-int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic)
+int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic)
 {
 	walk->total = req->cryptlen;
 
@@ -375,8 +369,9 @@ int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
 }
 EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);
 
-int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic)
+int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic)
 {
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
--- a/include/crypto/internal/skcipher.h
+++ b/include/crypto/internal/skcipher.h

@@ -197,13 +197,15 @@ int lskcipher_register_instance(struct crypto_template *tmpl,
 				struct lskcipher_instance *inst);
 
 int skcipher_walk_done(struct skcipher_walk *walk, int res);
-int skcipher_walk_virt(struct skcipher_walk *walk,
-		       struct skcipher_request *req,
+int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
+		       struct skcipher_request *__restrict req,
 		       bool atomic);
-int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic);
-int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic);
+int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic);
+int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic);
 
 static inline void skcipher_walk_abort(struct skcipher_walk *walk)
 {
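
For context, callers are unaffected by the stricter prototypes, since
the walk and the request are always distinct objects. A condensed,
driver-style sketch (error handling trimmed; encrypt_blocks is a
placeholder, and the walk field usage follows the long-standing walk
API rather than anything introduced here):

  #include <crypto/internal/skcipher.h>
  #include <linux/string.h>

  /* Placeholder for a real block cipher primitive. */
  static void encrypt_blocks(u8 *dst, const u8 *src, unsigned int len)
  {
          memcpy(dst, src, len);
  }

  static int example_encrypt(struct skcipher_request *req)
  {
          struct skcipher_walk walk;
          unsigned int nbytes;
          int err;

          /* &walk and req never alias, so __restrict holds trivially. */
          err = skcipher_walk_virt(&walk, req, false);

          while ((nbytes = walk.nbytes) != 0) {
                  unsigned int tail = nbytes % walk.blocksize;

                  encrypt_blocks(walk.dst.virt.addr, walk.src.virt.addr,
                                 nbytes - tail);
                  err = skcipher_walk_done(&walk, tail);
          }

          return err;
  }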