crypto: sahara - avoid skcipher fallback code duplication
Factor out the duplicated skcipher fallback handling code into a helper
function, sahara_aes_fallback(). Also, perform the "is a fallback
required?" check in a single place, in sahara_aes_crypt().

Signed-off-by: Ovidiu Panait <ovidiu.panait@windriver.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 3d5a31dff1
commit 01d70a4bbf
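For context: the diff below relies on ctx->fallback and rctx->fallback_req
already existing; they are set up when the tfm is initialized. A sketch of
that setup, following the standard kernel software-fallback pattern (this
code is not part of the commit, and details may differ slightly from the
driver's actual sahara_aes_init_tfm()):

static int sahara_aes_init_tfm(struct crypto_skcipher *tfm)
{
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* Allocate a software fallback for key sizes the hardware
	 * cannot handle (anything other than AES-128).  Freed again
	 * in the matching exit_tfm callback. */
	ctx->fallback = crypto_alloc_skcipher(name, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback))
		return PTR_ERR(ctx->fallback);

	/* Reserve room for the fallback's request inside our reqctx,
	 * so rctx->fallback_req can be handed to it directly. */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct sahara_aes_reqctx) +
					 crypto_skcipher_reqsize(ctx->fallback));

	return 0;
}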
@@ -646,12 +646,37 @@ static int sahara_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 	return crypto_skcipher_setkey(ctx->fallback, key, keylen);
 }
 
+static int sahara_aes_fallback(struct skcipher_request *req, unsigned long mode)
+{
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
+
+	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+	skcipher_request_set_callback(&rctx->fallback_req,
+				      req->base.flags,
+				      req->base.complete,
+				      req->base.data);
+	skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+				   req->dst, req->cryptlen, req->iv);
+
+	if (mode & FLAGS_ENCRYPT)
+		return crypto_skcipher_encrypt(&rctx->fallback_req);
+
+	return crypto_skcipher_decrypt(&rctx->fallback_req);
+}
+
 static int sahara_aes_crypt(struct skcipher_request *req, unsigned long mode)
 {
 	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 	struct sahara_dev *dev = dev_ptr;
 	int err = 0;
 
+	if (unlikely(ctx->keylen != AES_KEYSIZE_128))
+		return sahara_aes_fallback(req, mode);
+
 	dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n",
 		req->cryptlen, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
 
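For reference, the mode bits tested above and below are defined near the top
of drivers/crypto/sahara.c, roughly as follows (values assumed from the
driver's flag layout; not part of this diff):

/* Mode bitmask passed to sahara_aes_crypt()/sahara_aes_fallback().
 * Values assumed, shown only for reference. */
#define FLAGS_ENCRYPT	BIT(0)	/* encrypt (else decrypt) */
#define FLAGS_CBC	BIT(1)	/* CBC mode (else ECB) */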
@@ -674,81 +699,21 @@ static int sahara_aes_crypt(struct skcipher_request *req, unsigned long mode)
 
 static int sahara_aes_ecb_encrypt(struct skcipher_request *req)
 {
-	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
-	struct sahara_ctx *ctx = crypto_skcipher_ctx(
-		crypto_skcipher_reqtfm(req));
-
-	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
-		skcipher_request_set_callback(&rctx->fallback_req,
-					      req->base.flags,
-					      req->base.complete,
-					      req->base.data);
-		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
-					   req->dst, req->cryptlen, req->iv);
-		return crypto_skcipher_encrypt(&rctx->fallback_req);
-	}
-
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT);
 }
 
 static int sahara_aes_ecb_decrypt(struct skcipher_request *req)
 {
-	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
-	struct sahara_ctx *ctx = crypto_skcipher_ctx(
-		crypto_skcipher_reqtfm(req));
-
-	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
-		skcipher_request_set_callback(&rctx->fallback_req,
-					      req->base.flags,
-					      req->base.complete,
-					      req->base.data);
-		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
-					   req->dst, req->cryptlen, req->iv);
-		return crypto_skcipher_decrypt(&rctx->fallback_req);
-	}
-
 	return sahara_aes_crypt(req, 0);
 }
 
 static int sahara_aes_cbc_encrypt(struct skcipher_request *req)
 {
-	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
-	struct sahara_ctx *ctx = crypto_skcipher_ctx(
-		crypto_skcipher_reqtfm(req));
-
-	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
-		skcipher_request_set_callback(&rctx->fallback_req,
-					      req->base.flags,
-					      req->base.complete,
-					      req->base.data);
-		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
-					   req->dst, req->cryptlen, req->iv);
-		return crypto_skcipher_encrypt(&rctx->fallback_req);
-	}
-
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);
 }
 
 static int sahara_aes_cbc_decrypt(struct skcipher_request *req)
 {
-	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
-	struct sahara_ctx *ctx = crypto_skcipher_ctx(
-		crypto_skcipher_reqtfm(req));
-
-	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
-		skcipher_request_set_callback(&rctx->fallback_req,
-					      req->base.flags,
-					      req->base.complete,
-					      req->base.data);
-		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
-					   req->dst, req->cryptlen, req->iv);
-		return crypto_skcipher_decrypt(&rctx->fallback_req);
-	}
-
 	return sahara_aes_crypt(req, FLAGS_CBC);
 }
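To see the refactored path in action: any request whose key is not 128 bits
now funnels through the single check in sahara_aes_crypt() and on into
sahara_aes_fallback(). A hypothetical caller-side sketch using the generic
kernel crypto API (none of this is in the commit; the function name and the
zeroed buffers are illustrative only):

static int sahara_fallback_demo(void)
{
	u8 key[32] = { 0 };	/* AES-256 key -> not AES-128, so fallback */
	u8 iv[16] = { 0 };
	u8 buf[16] = { 0 };
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, sizeof(buf));
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

	/* If the Sahara driver backs this tfm, the request enters
	 * sahara_aes_crypt(), fails the AES_KEYSIZE_128 check, and is
	 * completed by sahara_aes_fallback(). */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}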