Mirror of https://github.com/torvalds/linux.git
commit 7fa885e2a2
ARMv8.2 specifies special instructions for the SM3 cryptographic hash
and the SM4 symmetric cipher. While it is unlikely that a core would
implement one and not the other, we should only use SM4 instructions
if the SM4 CPU feature bit is set, and we currently check the SM3
feature bit instead. So fix that.
Fixes: e99ce921c4 ("crypto: arm64 - add support for SM4...")
Cc: <stable@vger.kernel.org>
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
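
For context, a minimal sketch of the change being described (reconstructed from the message above, not the literal diff from the commit): the only functional difference is which CPU feature bit gates loading of the sm4-ce-glue module.

/* Sketch: the module was previously gated on the SM3 CPU feature bit,
 * and now checks the SM4 bit that the code actually depends on.
 */
-module_cpu_feature_match(SM3, sm4_ce_mod_init);	/* before: wrong feature bit */
+module_cpu_feature_match(SM4, sm4_ce_mod_init);	/* after: matches the SM4 instructions used */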
74 lines, 1.8 KiB, C
// SPDX-License-Identifier: GPL-2.0

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/sm4.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/types.h>

MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("sm4-ce");
MODULE_DESCRIPTION("SM4 symmetric cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

asmlinkage void sm4_ce_do_crypt(const u32 *rk, void *out, const void *in);

static void sm4_ce_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!may_use_simd()) {
		/* NEON is not usable in this context; use the generic C code */
		crypto_sm4_encrypt(tfm, out, in);
	} else {
		kernel_neon_begin();
		sm4_ce_do_crypt(ctx->rkey_enc, out, in);
		kernel_neon_end();
	}
}

static void sm4_ce_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!may_use_simd()) {
		/* NEON is not usable in this context; use the generic C code */
		crypto_sm4_decrypt(tfm, out, in);
	} else {
		kernel_neon_begin();
		sm4_ce_do_crypt(ctx->rkey_dec, out, in);
		kernel_neon_end();
	}
}

static struct crypto_alg sm4_ce_alg = {
	.cra_name = "sm4",
	.cra_driver_name = "sm4-ce",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = SM4_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_sm4_ctx),
	.cra_module = THIS_MODULE,
	.cra_u.cipher = {
		.cia_min_keysize = SM4_KEY_SIZE,
		.cia_max_keysize = SM4_KEY_SIZE,
		.cia_setkey = crypto_sm4_set_key,
		.cia_encrypt = sm4_ce_encrypt,
		.cia_decrypt = sm4_ce_decrypt
	}
};

static int __init sm4_ce_mod_init(void)
{
	return crypto_register_alg(&sm4_ce_alg);
}

static void __exit sm4_ce_mod_fini(void)
{
	crypto_unregister_alg(&sm4_ce_alg);
}

/* Only bind the module to CPUs that advertise the SM4 instructions
 * (the feature bit this fix makes the code check).
 */
module_cpu_feature_match(SM4, sm4_ce_mod_init);
module_exit(sm4_ce_mod_fini);
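
The registered algorithm is a plain single-block cipher, so other kernel code can pick it up through the generic crypto API by name. A minimal, hypothetical usage sketch (function name and error handling are illustrative, not from the commit): when this module is loaded and the CPU has the SM4 feature, "sm4" resolves to "sm4-ce" because of its higher cra_priority; otherwise the generic C implementation serves the request.

#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/sm4.h>

/* Hypothetical example: encrypt one 16-byte block with the "sm4" cipher. */
static int sm4_demo(const u8 key[SM4_KEY_SIZE],
		    const u8 in[SM4_BLOCK_SIZE], u8 out[SM4_BLOCK_SIZE])
{
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("sm4", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, SM4_KEY_SIZE);
	if (!err)
		crypto_cipher_encrypt_one(tfm, out, in);

	crypto_free_cipher(tfm);
	return err;
}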