mirror of https://github.com/torvalds/linux.git
synced 2024-12-02 09:01:34 +00:00
commit 674f368a95
The CRYPTO_TFM_RES_BAD_KEY_LEN flag was apparently meant as a way to make the ->setkey() functions provide more information about errors. However, no one actually checks for this flag, which makes it pointless.

Also, many algorithms fail to set this flag when given a bad length key. Reviewing just the generic implementations, this is the case for aes-fixed-time, cbcmac, echainiv, nhpoly1305, pcrypt, rfc3686, rfc4309, rfc7539, rfc7539esp, salsa20, seqiv, and xcbc. But there are probably many more in arch/*/crypto/ and drivers/crypto/.

Some algorithms can even set this flag when the key is the correct length. For example, authenc and authencesn set it when the key payload is malformed in any way (not just a bad length), the atmel-sha and ccree drivers can set it if a memory allocation fails, and the chelsio driver sets it for bad auth tag lengths, not just bad key lengths.

So even if someone actually wanted to start checking this flag (which seems unlikely, since it's been unused for a long time), there would be a lot of work needed to get it working correctly. But it would probably be much better to go back to the drawing board and just define different return values, like -EINVAL if the key is invalid for the algorithm vs. -EKEYREJECTED if the key was rejected by a policy like "no weak keys". That would be much simpler, less error-prone, and easier to test.

So just remove this flag.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Reviewed-by: Horia Geantă <horia.geanta@nxp.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
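The return-value convention proposed above is easy to picture from a ->setkey() implementation. A minimal sketch, assuming a hypothetical cipher with a fixed 32-byte key; toy_setkey(), toy_key_is_weak(), and toy_expand_key() are made-up names for illustration, not code from this tree:

#include <linux/errno.h>
#include <crypto/internal/skcipher.h>

#define TOY_KEY_SIZE 32

static int toy_setkey(struct crypto_skcipher *tfm,
		      const u8 *key, unsigned int keylen)
{
	/* Key is invalid for the algorithm: wrong length. */
	if (keylen != TOY_KEY_SIZE)
		return -EINVAL;

	/* Key rejected by a policy such as "no weak keys". */
	if (toy_key_is_weak(key, keylen))
		return -EKEYREJECTED;

	return toy_expand_key(crypto_skcipher_ctx(tfm), key, keylen);
}

Callers can then distinguish the two failure modes from the error code alone, with no extra flag to set or query.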
298 lines
7.7 KiB
C
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8

asmlinkage void cast6_ecb_enc_8way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void cast6_ecb_dec_8way(const void *ctx, u8 *dst, const u8 *src);

asmlinkage void cast6_cbc_dec_8way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void cast6_ctr_8way(const void *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

asmlinkage void cast6_xts_enc_8way(const void *ctx, u8 *dst, const u8 *src,
				   le128 *iv);
asmlinkage void cast6_xts_dec_8way(const void *ctx, u8 *dst, const u8 *src,
				   le128 *iv);

static int cast6_setkey_skcipher(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	return cast6_setkey(&tfm->base, key, keylen);
}

static void cast6_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_encrypt);
}

static void cast6_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_decrypt);
}
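/*
 * CTR fallback for a single block: encrypt the big-endian counter block,
 * XOR the result into the source block, and advance the little-endian IV
 * for the next block.
 */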
static void cast6_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
{
	be128 ctrblk;
	u128 *dst = (u128 *)d;
	const u128 *src = (const u128 *)s;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
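/*
 * Dispatch tables for the glue helper: process eight blocks at a time with
 * the AVX implementation when enough data is available, otherwise fall back
 * to the generic one-block routines.
 */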
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = cast6_ecb_enc_8way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = __cast6_encrypt }
	} }
};

static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = cast6_ctr_8way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = cast6_crypt_ctr }
	} }
};

static const struct common_glue_ctx cast6_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = cast6_xts_enc_8way }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = cast6_xts_enc }
	} }
};

static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = cast6_ecb_dec_8way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = __cast6_decrypt }
	} }
};

static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = cast6_cbc_dec_8way }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = __cast6_decrypt }
	} }
};

static const struct common_glue_ctx cast6_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = cast6_xts_dec_8way }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = cast6_xts_dec }
	} }
};

static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&cast6_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&cast6_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(__cast6_encrypt, req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&cast6_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&cast6_ctr, req);
}
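/* XTS needs two CAST6 key schedules: one for the tweak, one for the data. */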
struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&cast6_enc_xts, req, __cast6_encrypt,
				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&cast6_dec_xts, req, __cast6_encrypt,
				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

static struct skcipher_alg cast6_algs[] = {
	{
		.base.cra_name = "__ecb(cast6)",
		.base.cra_driver_name = "__ecb-cast6-avx",
		.base.cra_priority = 200,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAST6_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct cast6_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAST6_MIN_KEY_SIZE,
		.max_keysize = CAST6_MAX_KEY_SIZE,
		.setkey = cast6_setkey_skcipher,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base.cra_name = "__cbc(cast6)",
		.base.cra_driver_name = "__cbc-cast6-avx",
		.base.cra_priority = 200,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAST6_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct cast6_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAST6_MIN_KEY_SIZE,
		.max_keysize = CAST6_MAX_KEY_SIZE,
		.ivsize = CAST6_BLOCK_SIZE,
		.setkey = cast6_setkey_skcipher,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
	}, {
		.base.cra_name = "__ctr(cast6)",
		.base.cra_driver_name = "__ctr-cast6-avx",
		.base.cra_priority = 200,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct cast6_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAST6_MIN_KEY_SIZE,
		.max_keysize = CAST6_MAX_KEY_SIZE,
		.ivsize = CAST6_BLOCK_SIZE,
		.chunksize = CAST6_BLOCK_SIZE,
		.setkey = cast6_setkey_skcipher,
		.encrypt = ctr_crypt,
		.decrypt = ctr_crypt,
	}, {
		.base.cra_name = "__xts(cast6)",
		.base.cra_driver_name = "__xts-cast6-avx",
		.base.cra_priority = 200,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAST6_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct cast6_xts_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = 2 * CAST6_MIN_KEY_SIZE,
		.max_keysize = 2 * CAST6_MAX_KEY_SIZE,
		.ivsize = CAST6_BLOCK_SIZE,
		.setkey = xts_cast6_setkey,
		.encrypt = xts_encrypt,
		.decrypt = xts_decrypt,
	},
};

static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];
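/*
 * The assembler implementation needs AVX; bail out unless the kernel and
 * CPU support the SSE and YMM xfeature states.
 */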
static int __init cast6_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(cast6_algs,
					      ARRAY_SIZE(cast6_algs),
					      cast6_simd_algs);
}

static void __exit cast6_exit(void)
{
	simd_unregister_skciphers(cast6_algs, ARRAY_SIZE(cast6_algs),
				  cast6_simd_algs);
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");