staging: rtl8192e: rtllib_crypt_ccmp.c: Use crypto API ccm(aes)

Use the ccm(aes) AEAD transform instead of invoking the AES block
cipher block by block.

Signed-off-by: Christina Quast <contact@christina-quast.de>
Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Link: https://lore.kernel.org/r/20190816065936.12214-3-contact@christina-quast.de
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
commit 5ee5265674, parent eb0e7bf3ca
Christina Quast, 2019-08-16 08:59:36 +02:00, committed by Greg Kroah-Hartman
2 changed files with 78 additions and 110 deletions
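
For readers new to the kernel AEAD interface, the pattern the diff below adopts
looks roughly like the following. This is a minimal sketch, not the patch
itself: the key/aad/data buffers and their lengths are illustrative, and error
handling is abbreviated.

	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg[2];
	u8 iv[16];	/* flags byte + 13-byte nonce + counter bytes */
	int ret;

	/* One transform per key; the CCM template wraps the AES cipher. */
	tfm = crypto_alloc_aead("ccm(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	ret = crypto_aead_setauthsize(tfm, 8);		/* 8-octet MIC */
	ret = ret ?: crypto_aead_setkey(tfm, key, 16);	/* 128-bit TK */

	/* sg[0] carries the associated data, sg[1] payload plus MIC room. */
	sg_init_table(sg, 2);
	sg_set_buf(&sg[0], aad, aad_len);
	sg_set_buf(&sg[1], data, data_len + 8);

	req = aead_request_alloc(tfm, GFP_ATOMIC);
	aead_request_set_callback(req, 0, NULL, NULL);	/* synchronous use */
	aead_request_set_ad(req, aad_len);
	aead_request_set_crypt(req, sg, sg, data_len, iv);
	ret = crypto_aead_encrypt(req);	/* writes the MIC after the payload */
	aead_request_free(req);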

--- a/drivers/staging/rtl8192e/Kconfig
+++ b/drivers/staging/rtl8192e/Kconfig
@@ -15,6 +15,7 @@ config RTLLIB_CRYPTO_CCMP
 	tristate "Support for rtllib CCMP crypto"
 	depends on RTLLIB
 	select CRYPTO_AES
+	select CRYPTO_CCM
 	default y
 	help
 	  CCMP crypto driver for rtllib.

--- a/drivers/staging/rtl8192e/rtllib_crypt_ccmp.c
+++ b/drivers/staging/rtl8192e/rtllib_crypt_ccmp.c
@@ -17,6 +17,7 @@
 #include "rtllib.h"
 
 #include <linux/crypto.h>
+#include <crypto/aead.h>
 
 #include <linux/scatterlist.h>
@@ -39,20 +40,13 @@ struct rtllib_ccmp_data {
 	int key_idx;
 
-	struct crypto_tfm *tfm;
+	struct crypto_aead *tfm;
 
 	/* scratch buffers for virt_to_page() (crypto API) */
-	u8 tx_b0[AES_BLOCK_LEN], tx_b[AES_BLOCK_LEN],
-	   tx_e[AES_BLOCK_LEN], tx_s0[AES_BLOCK_LEN];
-	u8 rx_b0[AES_BLOCK_LEN], rx_b[AES_BLOCK_LEN], rx_a[AES_BLOCK_LEN];
+	u8 tx_aad[2 * AES_BLOCK_LEN];
+	u8 rx_aad[2 * AES_BLOCK_LEN];
 };
 
-static void rtllib_ccmp_aes_encrypt(struct crypto_tfm *tfm,
-				    const u8 pt[16], u8 ct[16])
-{
-	crypto_cipher_encrypt_one((void *)tfm, ct, pt);
-}
-
 static void *rtllib_ccmp_init(int key_idx)
 {
 	struct rtllib_ccmp_data *priv;
@@ -62,7 +56,7 @@ static void *rtllib_ccmp_init(int key_idx)
 		goto fail;
 	priv->key_idx = key_idx;
 
-	priv->tfm = (void *)crypto_alloc_cipher("aes", 0, 0);
+	priv->tfm = crypto_alloc_aead("ccm(aes)", 0, CRYPTO_ALG_ASYNC);
 	if (IS_ERR(priv->tfm)) {
 		pr_debug("Could not allocate crypto API aes\n");
 		priv->tfm = NULL;
@@ -73,7 +67,7 @@ static void *rtllib_ccmp_init(int key_idx)
 fail:
 	if (priv) {
 		if (priv->tfm)
-			crypto_free_cipher((void *)priv->tfm);
+			crypto_free_aead(priv->tfm);
 		kfree(priv);
 	}
@@ -86,31 +80,18 @@ static void rtllib_ccmp_deinit(void *priv)
 	struct rtllib_ccmp_data *_priv = priv;
 
 	if (_priv && _priv->tfm)
-		crypto_free_cipher((void *)_priv->tfm);
+		crypto_free_aead(_priv->tfm);
 	kfree(priv);
 }
 
-static inline void xor_block(u8 *b, u8 *a, size_t len)
-{
-	int i;
-
-	for (i = 0; i < len; i++)
-		b[i] ^= a[i];
-}
-
-static void ccmp_init_blocks(struct crypto_tfm *tfm,
-			     struct rtllib_hdr_4addr *hdr,
-			     u8 *pn, size_t dlen, u8 *b0, u8 *auth,
-			     u8 *s0)
+static int ccmp_init_iv_and_aad(struct rtllib_hdr_4addr *hdr,
+				u8 *pn, u8 *iv, u8 *aad)
 {
 	u8 *pos, qc = 0;
 	size_t aad_len;
 	u16 fc;
 	int a4_included, qc_included;
-	u8 aad[2 * AES_BLOCK_LEN];
 
 	fc = le16_to_cpu(hdr->frame_ctl);
 	a4_included = ((fc & (RTLLIB_FCTL_TODS | RTLLIB_FCTL_FROMDS)) ==
@@ -128,18 +109,19 @@ static void ccmp_init_blocks(struct crypto_tfm *tfm,
 		qc = *pos & 0x0f;
 		aad_len += 2;
 	}
-	/* CCM Initial Block:
-	 * Flag (Include authentication header, M=3 (8-octet MIC),
-	 *       L=1 (2-octet Dlen))
-	 * Nonce: 0x00 | A2 | PN
-	 * Dlen
+	/* In CCM, the initial vectors (IV) used for CTR mode encryption and CBC
+	 * mode authentication are not allowed to collide, yet both are derived
+	 * from the same vector. We only set L := 1 here to indicate that the
+	 * data size can be represented in (L+1) bytes. The CCM layer will take
+	 * care of storing the data length in the top (L+1) bytes and setting
+	 * and clearing the other bits as is required to derive the two IVs.
 	 */
-	b0[0] = 0x59;
-	b0[1] = qc;
-	memcpy(b0 + 2, hdr->addr2, ETH_ALEN);
-	memcpy(b0 + 8, pn, CCMP_PN_LEN);
-	b0[14] = (dlen >> 8) & 0xff;
-	b0[15] = dlen & 0xff;
+	iv[0] = 0x1;
+
+	/* Nonce: QC | A2 | PN */
+	iv[1] = qc;
+	memcpy(iv + 2, hdr->addr2, ETH_ALEN);
+	memcpy(iv + 8, pn, CCMP_PN_LEN);
 
 	/* AAD:
 	 * FC with bits 4..6 and 11..13 masked to zero; 14 is always one
@@ -149,31 +131,21 @@ static void ccmp_init_blocks(struct crypto_tfm *tfm,
 	 * QC (if present)
 	 */
 	pos = (u8 *) hdr;
-	aad[0] = 0; /* aad_len >> 8 */
-	aad[1] = aad_len & 0xff;
-	aad[2] = pos[0] & 0x8f;
-	aad[3] = pos[1] & 0xc7;
-	memcpy(aad + 4, hdr->addr1, 3 * ETH_ALEN);
+	aad[0] = pos[0] & 0x8f;
+	aad[1] = pos[1] & 0xc7;
+	memcpy(aad + 2, hdr->addr1, 3 * ETH_ALEN);
 	pos = (u8 *) &hdr->seq_ctl;
-	aad[22] = pos[0] & 0x0f;
-	aad[23] = 0; /* all bits masked */
-	memset(aad + 24, 0, 8);
+	aad[20] = pos[0] & 0x0f;
+	aad[21] = 0; /* all bits masked */
+	memset(aad + 22, 0, 8);
 	if (a4_included)
-		memcpy(aad + 24, hdr->addr4, ETH_ALEN);
+		memcpy(aad + 22, hdr->addr4, ETH_ALEN);
 	if (qc_included) {
-		aad[a4_included ? 30 : 24] = qc;
+		aad[a4_included ? 28 : 22] = qc;
 		/* rest of QC masked */
 	}
 
-	/* Start with the first block and AAD */
-	rtllib_ccmp_aes_encrypt(tfm, b0, auth);
-	xor_block(auth, aad, AES_BLOCK_LEN);
-	rtllib_ccmp_aes_encrypt(tfm, auth, auth);
-	xor_block(auth, &aad[AES_BLOCK_LEN], AES_BLOCK_LEN);
-	rtllib_ccmp_aes_encrypt(tfm, auth, auth);
-	b0[0] &= 0x07;
-	b0[14] = b0[15] = 0;
-	rtllib_ccmp_aes_encrypt(tfm, b0, s0);
+	return aad_len;
 }
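
For orientation between the hunks above and below: the 16-byte block that
ccmp_init_iv_and_aad() hands to the CCM layer as the IV has the following
layout. This is a sketch; the offsets follow the code above, and the
flags/counter handling is per RFC 3610 with L = 1.

	/*
	 * iv[0]      flags; only L' = L - 1 = 1 is set here, the CCM layer
	 *            fills in the remaining flag bits itself
	 * iv[1]      QC (priority)           \
	 * iv[2..7]   A2, transmitter address  > the 13-byte CCM nonce
	 * iv[8..13]  PN, packet number       /
	 * iv[14..15] length/counter field, managed entirely by the CCM layer
	 */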
@@ -213,40 +185,37 @@ static int rtllib_ccmp_encrypt(struct sk_buff *skb, int hdr_len, void *priv)
 	*pos++ = key->tx_pn[1];
 	*pos++ = key->tx_pn[0];
 
 	hdr = (struct rtllib_hdr_4addr *) skb->data;
 	if (!tcb_desc->bHwSec) {
-		int blocks, last, len;
-		u8 *mic;
-		u8 *b0 = key->tx_b0;
-		u8 *b = key->tx_b;
-		u8 *e = key->tx_e;
-		u8 *s0 = key->tx_s0;
+		struct aead_request *req;
+		struct scatterlist sg[2];
+		u8 *aad = key->tx_aad;
+		u8 iv[AES_BLOCK_LEN];
+		int aad_len, ret;
+		int data_len = skb->len - hdr_len - CCMP_HDR_LEN;
 
-		mic = skb_put(skb, CCMP_MIC_LEN);
+		req = aead_request_alloc(key->tfm, GFP_ATOMIC);
+		if (!req)
+			return -ENOMEM;
 
-		ccmp_init_blocks(key->tfm, hdr, key->tx_pn, data_len,
-				 b0, b, s0);
+		aad_len = ccmp_init_iv_and_aad(hdr, key->tx_pn, iv, aad);
 
-		blocks = DIV_ROUND_UP(data_len, AES_BLOCK_LEN);
-		last = data_len % AES_BLOCK_LEN;
+		skb_put(skb, CCMP_MIC_LEN);
 
-		for (i = 1; i <= blocks; i++) {
-			len = (i == blocks && last) ? last : AES_BLOCK_LEN;
-			/* Authentication */
-			xor_block(b, pos, len);
-			rtllib_ccmp_aes_encrypt(key->tfm, b, b);
-			/* Encryption, with counter */
-			b0[14] = (i >> 8) & 0xff;
-			b0[15] = i & 0xff;
-			rtllib_ccmp_aes_encrypt(key->tfm, b0, e);
-			xor_block(pos, e, len);
-			pos += len;
-		}
+		sg_init_table(sg, 2);
+		sg_set_buf(&sg[0], aad, aad_len);
+		sg_set_buf(&sg[1], skb->data + hdr_len + CCMP_HDR_LEN,
+			   data_len + CCMP_MIC_LEN);
 
-		for (i = 0; i < CCMP_MIC_LEN; i++)
-			mic[i] = b[i] ^ s0[i];
+		aead_request_set_callback(req, 0, NULL, NULL);
+		aead_request_set_ad(req, aad_len);
+		aead_request_set_crypt(req, sg, sg, data_len, iv);
+
+		ret = crypto_aead_encrypt(req);
+		aead_request_free(req);
+
+		return ret;
 	}
 	return 0;
 }
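
A note on the encrypt hunk above: skb_put() only reserves room for the MIC.
The cryptlen passed to aead_request_set_crypt() stays data_len, and
crypto_aead_encrypt() writes the CCMP_MIC_LEN tag into that reserved tail,
which is why sg[1] spans data_len + CCMP_MIC_LEN bytes. Roughly, as a sketch
of the in-place buffer layout (names as in the code):

	/*
	 * skb->data: [ 802.11 hdr | CCMP hdr | payload (data_len) | MIC room ]
	 *                                      ^ sg[1] starts here
	 * sg[0] = aad (aad_len bytes, built from the masked 802.11 header)
	 * cryptlen = data_len; the MIC is produced by the transform itself
	 */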
@@ -302,35 +271,31 @@ static int rtllib_ccmp_decrypt(struct sk_buff *skb, int hdr_len, void *priv)
 		return -4;
 	}
 	if (!tcb_desc->bHwSec) {
-		size_t data_len = skb->len - hdr_len - CCMP_HDR_LEN -
-				  CCMP_MIC_LEN;
-		u8 *mic = skb->data + skb->len - CCMP_MIC_LEN;
-		u8 *b0 = key->rx_b0;
-		u8 *b = key->rx_b;
-		u8 *a = key->rx_a;
-		int i, blocks, last, len;
+		size_t data_len = skb->len - hdr_len - CCMP_HDR_LEN;
+		struct aead_request *req;
+		struct scatterlist sg[2];
+		u8 *aad = key->rx_aad;
+		u8 iv[AES_BLOCK_LEN];
+		int aad_len, ret;
 
-		ccmp_init_blocks(key->tfm, hdr, pn, data_len, b0, a, b);
-		xor_block(mic, b, CCMP_MIC_LEN);
+		req = aead_request_alloc(key->tfm, GFP_ATOMIC);
+		if (!req)
+			return -ENOMEM;
 
-		blocks = DIV_ROUND_UP(data_len, AES_BLOCK_LEN);
-		last = data_len % AES_BLOCK_LEN;
+		aad_len = ccmp_init_iv_and_aad(hdr, pn, iv, aad);
 
-		for (i = 1; i <= blocks; i++) {
-			len = (i == blocks && last) ? last : AES_BLOCK_LEN;
-			/* Decrypt, with counter */
-			b0[14] = (i >> 8) & 0xff;
-			b0[15] = i & 0xff;
-			rtllib_ccmp_aes_encrypt(key->tfm, b0, b);
-			xor_block(pos, b, len);
-			/* Authentication */
-			xor_block(a, pos, len);
-			rtllib_ccmp_aes_encrypt(key->tfm, a, a);
-			pos += len;
-		}
+		sg_init_table(sg, 2);
+		sg_set_buf(&sg[0], aad, aad_len);
+		sg_set_buf(&sg[1], pos, data_len);
 
-		if (memcmp(mic, a, CCMP_MIC_LEN) != 0) {
+		aead_request_set_callback(req, 0, NULL, NULL);
+		aead_request_set_ad(req, aad_len);
+		aead_request_set_crypt(req, sg, sg, data_len, iv);
+
+		ret = crypto_aead_decrypt(req);
+		aead_request_free(req);
+
+		if (ret) {
 			if (net_ratelimit()) {
 				pr_debug("CCMP: decrypt failed: STA= %pM\n",
 					 hdr->addr2);
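
Conversely, in the decrypt hunk above the AEAD convention is that cryptlen
includes the authentication tag, which is why data_len no longer subtracts
CCMP_MIC_LEN. Assuming the usual kernel AEAD behavior, a MIC mismatch now
surfaces as an error return (typically -EBADMSG) rather than via a manual
memcmp():

	/*
	 * encrypt: cryptlen = payload length       -> MIC appended by CCM
	 * decrypt: cryptlen = payload + MIC length -> MIC stripped and checked;
	 *          crypto_aead_decrypt() returns nonzero (-EBADMSG) on mismatch
	 */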
@@ -354,7 +319,7 @@ static int rtllib_ccmp_set_key(void *key, int len, u8 *seq, void *priv)
 {
 	struct rtllib_ccmp_data *data = priv;
 	int keyidx;
-	struct crypto_tfm *tfm = data->tfm;
+	struct crypto_aead *tfm = data->tfm;
 
 	keyidx = data->key_idx;
 	memset(data, 0, sizeof(*data));
@@ -371,7 +336,9 @@ static int rtllib_ccmp_set_key(void *key, int len, u8 *seq, void *priv)
 			data->rx_pn[4] = seq[1];
 			data->rx_pn[5] = seq[0];
 		}
-		crypto_cipher_setkey((void *)data->tfm, data->key, CCMP_TK_LEN);
+		if (crypto_aead_setauthsize(data->tfm, CCMP_MIC_LEN) ||
+		    crypto_aead_setkey(data->tfm, data->key, CCMP_TK_LEN))
+			return -1;
 	} else if (len == 0) {
 		data->key_set = 0;
 	} else {