crypto: crypto4xx - use sync skcipher for fallback

This replaces struct crypto_skcipher and the extra request size
with struct crypto_sync_skcipher and SYNC_SKCIPHER_REQUEST_ON_STACK(),
which uses a fixed stack size.

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Author:    Christian Lamparter <chunkeey@gmail.com>
Date:      2019-04-22 13:26:00 +02:00
Committer: Herbert Xu
Commit:    9848e4c873 (parent 7e92e1717e)
3 changed files with 10 additions and 15 deletions
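
Aside (not part of the commit): a minimal sketch of the crypto_sync_skcipher fallback pattern the driver is converted to below. All sync_fb_* names are invented for illustration; the point is the allocate / setkey / on-stack request / free sequence.

/*
 * Illustrative sketch only -- not taken from this commit.  A driver keeps a
 * crypto_sync_skcipher as its software fallback and runs each fallback
 * request through a fixed-size request placed on the stack.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

struct sync_fb_ctx {
	struct crypto_sync_skcipher *fallback;
};

static int sync_fb_init(struct sync_fb_ctx *ctx, const char *cra_name)
{
	/* No CRYPTO_ALG_ASYNC in the mask: a sync tfm is never async. */
	ctx->fallback = crypto_alloc_sync_skcipher(cra_name, 0,
						   CRYPTO_ALG_NEED_FALLBACK);
	return PTR_ERR_OR_ZERO(ctx->fallback);
}

static int sync_fb_setkey(struct sync_fb_ctx *ctx, const u8 *key,
			  unsigned int keylen)
{
	return crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
}

static int sync_fb_encrypt(struct sync_fb_ctx *ctx, struct scatterlist *src,
			   struct scatterlist *dst, unsigned int nbytes,
			   u8 *iv)
{
	/* Fixed-size request on the stack instead of a per-tfm reqsize. */
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
	int ret;

	skcipher_request_set_sync_tfm(subreq, ctx->fallback);
	skcipher_request_set_callback(subreq, 0, NULL, NULL);
	skcipher_request_set_crypt(subreq, src, dst, nbytes, iv);
	ret = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);
	return ret;
}

static void sync_fb_exit(struct sync_fb_ctx *ctx)
{
	if (!IS_ERR_OR_NULL(ctx->fallback))
		crypto_free_sync_skcipher(ctx->fallback);
}

The on-stack request works because a sync tfm's request size is bounded at allocation time, which is why the crypto_skcipher_set_reqsize() bookkeeping can be dropped in the hunks below.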

drivers/crypto/amcc/crypto4xx_alg.c

@@ -264,10 +264,10 @@ crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
 	 * overlow.
 	 */
 	if (counter + nblks < counter) {
-		struct skcipher_request *subreq = skcipher_request_ctx(req);
+		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
 		int ret;
 
-		skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
+		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
 		skcipher_request_set_callback(subreq, req->base.flags,
 			NULL, NULL);
 		skcipher_request_set_crypt(subreq, req->src, req->dst,
@@ -289,14 +289,14 @@ static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
 {
 	int rc;
 
-	crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
+	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
				    CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
+	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
				  crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
-	rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
+	rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
 	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
 	crypto_skcipher_set_flags(cipher,
-			  crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
+			  crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
			  CRYPTO_TFM_RES_MASK);
 
 	return rc;

drivers/crypto/amcc/crypto4xx_core.c

@@ -965,15 +965,10 @@ static int crypto4xx_sk_init(struct crypto_skcipher *sk)
 
 	if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
 		ctx->sw_cipher.cipher =
-			crypto_alloc_skcipher(alg->base.cra_name, 0,
-					      CRYPTO_ALG_NEED_FALLBACK |
-					      CRYPTO_ALG_ASYNC);
+			crypto_alloc_sync_skcipher(alg->base.cra_name, 0,
+						   CRYPTO_ALG_NEED_FALLBACK);
 		if (IS_ERR(ctx->sw_cipher.cipher))
 			return PTR_ERR(ctx->sw_cipher.cipher);
-
-		crypto_skcipher_set_reqsize(sk,
-			sizeof(struct skcipher_request) + 32 +
-			crypto_skcipher_reqsize(ctx->sw_cipher.cipher));
 	}
 
 	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
@@ -992,7 +987,7 @@ static void crypto4xx_sk_exit(struct crypto_skcipher *sk)
 
 	crypto4xx_common_exit(ctx);
 	if (ctx->sw_cipher.cipher)
-		crypto_free_skcipher(ctx->sw_cipher.cipher);
+		crypto_free_sync_skcipher(ctx->sw_cipher.cipher);
 }
 
 static int crypto4xx_aead_init(struct crypto_aead *tfm)

drivers/crypto/amcc/crypto4xx_core.h

@@ -131,7 +131,7 @@ struct crypto4xx_ctx {
 	__le32 iv_nonce;
 	u32 sa_len;
 	union {
-		struct crypto_skcipher *cipher;
+		struct crypto_sync_skcipher *cipher;
 		struct crypto_aead *aead;
 	} sw_cipher;
 };
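
For reference, struct crypto_sync_skcipher (from include/crypto/skcipher.h) is just a wrapper around the regular tfm, which is why the header hunk above is a plain type swap:

struct crypto_sync_skcipher {
	struct crypto_skcipher base;
};

crypto_alloc_sync_skcipher() restricts the lookup to synchronous implementations itself (hence the dropped CRYPTO_ALG_ASYNC in crypto4xx_sk_init()) and refuses algorithms whose request size would not fit the fixed on-stack buffer used by SYNC_SKCIPHER_REQUEST_ON_STACK().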