mirror of https://github.com/torvalds/linux.git
crypto: lib/sha256 - Use generic code from sha256_base
Instead of duplicating the sha256 block processing code, reuse the common code from crypto/sha256_base.h.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit 6c19f3bfff
parent 70d391a863
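Background for the change: crypto/sha256_base.h already carries the generic Merkle-Damgård bookkeeping (buffer a partial block, hand complete 64-byte blocks to an algorithm-supplied block function), and this patch makes lib/crypto/sha256.c reuse that instead of keeping its own copy. The following is a minimal, stand-alone user-space sketch of that buffering pattern only; the ctx_t and demo_* names are invented for illustration and are not the kernel API, which operates on struct sha256_state.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 64

/* Minimal stand-in for the kernel's struct sha256_state bookkeeping. */
typedef struct {
        uint64_t count;             /* total bytes fed in so far */
        uint8_t buf[BLOCK_SIZE];    /* partial-block buffer */
} ctx_t;

/* The algorithm-specific part: consume 'blocks' complete 64-byte blocks. */
typedef void (block_fn)(ctx_t *ctx, const uint8_t *src, int blocks);

/* Generic update: buffer partial input, pass full blocks to 'fn'. */
static void demo_do_update(ctx_t *ctx, const uint8_t *data, size_t len,
                           block_fn *fn)
{
        size_t partial = ctx->count % BLOCK_SIZE;

        ctx->count += len;

        if (partial + len >= BLOCK_SIZE) {
                int blocks;

                if (partial) {
                        size_t p = BLOCK_SIZE - partial;

                        /* top up the buffered partial block and flush it */
                        memcpy(ctx->buf + partial, data, p);
                        data += p;
                        len -= p;
                        fn(ctx, ctx->buf, 1);
                }

                blocks = (int)(len / BLOCK_SIZE);
                len %= BLOCK_SIZE;
                if (blocks) {
                        fn(ctx, data, blocks);
                        data += (size_t)blocks * BLOCK_SIZE;
                }
                partial = 0;
        }
        if (len)
                memcpy(ctx->buf + partial, data, len);
}

/* Trivial block function: just report how many blocks it was handed. */
static void count_blocks(ctx_t *ctx, const uint8_t *src, int blocks)
{
        (void)ctx;
        (void)src;
        printf("block_fn called for %d block(s)\n", blocks);
}

int main(void)
{
        ctx_t ctx = { 0 };
        uint8_t msg[150] = { 0 };

        demo_do_update(&ctx, msg, 10, count_blocks);  /* buffered, no call */
        demo_do_update(&ctx, msg, 150, count_blocks); /* flush 1 block, then 1 more */
        return 0;
}

The kernel version of this helper is lib_sha256_base_do_update() in the first hunk below; the per-algorithm block function the patch introduces is sha256_transform_blocks() in lib/crypto/sha256.c.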
--- a/include/crypto/sha256_base.h
+++ b/include/crypto/sha256_base.h
@@ -8,13 +8,12 @@
 #ifndef _CRYPTO_SHA256_BASE_H
 #define _CRYPTO_SHA256_BASE_H
 
+#include <asm/byteorder.h>
+#include <asm/unaligned.h>
 #include <crypto/internal/hash.h>
 #include <crypto/sha2.h>
-#include <linux/crypto.h>
-#include <linux/module.h>
 #include <linux/string.h>
-
-#include <asm/unaligned.h>
+#include <linux/types.h>
 
 typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
                                int blocks);
@@ -35,12 +34,11 @@ static inline int sha256_base_init(struct shash_desc *desc)
         return 0;
 }
 
-static inline int sha256_base_do_update(struct shash_desc *desc,
-                                        const u8 *data,
-                                        unsigned int len,
-                                        sha256_block_fn *block_fn)
+static inline int lib_sha256_base_do_update(struct sha256_state *sctx,
+                                            const u8 *data,
+                                            unsigned int len,
+                                            sha256_block_fn *block_fn)
 {
-        struct sha256_state *sctx = shash_desc_ctx(desc);
         unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
 
         sctx->count += len;
@@ -73,11 +71,20 @@ static inline int sha256_base_do_update(struct shash_desc *desc,
         return 0;
 }
 
-static inline int sha256_base_do_finalize(struct shash_desc *desc,
-                                          sha256_block_fn *block_fn)
+static inline int sha256_base_do_update(struct shash_desc *desc,
+                                        const u8 *data,
+                                        unsigned int len,
+                                        sha256_block_fn *block_fn)
+{
+        struct sha256_state *sctx = shash_desc_ctx(desc);
+
+        return lib_sha256_base_do_update(sctx, data, len, block_fn);
+}
+
+static inline int lib_sha256_base_do_finalize(struct sha256_state *sctx,
+                                              sha256_block_fn *block_fn)
 {
         const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
-        struct sha256_state *sctx = shash_desc_ctx(desc);
         __be64 *bits = (__be64 *)(sctx->buf + bit_offset);
         unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
 
@@ -96,10 +103,17 @@ static inline int sha256_base_do_finalize(struct shash_desc *desc,
         return 0;
 }
 
-static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
+static inline int sha256_base_do_finalize(struct shash_desc *desc,
+                                          sha256_block_fn *block_fn)
 {
-        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
         struct sha256_state *sctx = shash_desc_ctx(desc);
+
+        return lib_sha256_base_do_finalize(sctx, block_fn);
+}
+
+static inline int lib_sha256_base_finish(struct sha256_state *sctx, u8 *out,
+                                         unsigned int digest_size)
+{
         __be32 *digest = (__be32 *)out;
         int i;
 
@@ -110,4 +124,12 @@ static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
         return 0;
 }
 
+static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
+{
+        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
+        struct sha256_state *sctx = shash_desc_ctx(desc);
+
+        return lib_sha256_base_finish(sctx, out, digest_size);
+}
+
 #endif /* _CRYPTO_SHA256_BASE_H */
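One detail worth spelling out in lib_sha256_base_do_finalize() above: bit_offset is SHA256_BLOCK_SIZE - sizeof(__be64) = 56, i.e. the last eight bytes of the final block hold the message length in bits, big-endian, and a second padding block is only needed when the 0x80 terminator would leave no room for that length field. A small stand-alone sketch of that rule; final_blocks() is a made-up helper for illustration, not kernel code:

#include <stdint.h>
#include <stdio.h>

#define BLOCK_SIZE 64
#define BIT_OFFSET (BLOCK_SIZE - (int)sizeof(uint64_t))  /* 56 */

/* How many blocks does finalization emit for a message of 'count' bytes? */
static int final_blocks(uint64_t count)
{
        int partial = (int)(count % BLOCK_SIZE);

        /* one byte for the 0x80 terminator, then the length must fit at offset 56 */
        partial++;
        return partial > BIT_OFFSET ? 2 : 1;
}

int main(void)
{
        printf("55-byte message -> %d final block(s)\n", final_blocks(55));
        printf("56-byte message -> %d final block(s)\n", final_blocks(56));
        printf("64-byte message -> %d final block(s)\n", final_blocks(64));
        return 0;
}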
--- a/lib/crypto/sha256.c
+++ b/lib/crypto/sha256.c
@@ -11,12 +11,11 @@
  * Copyright (c) 2014 Red Hat Inc.
  */
 
-#include <linux/bitops.h>
-#include <linux/export.h>
+#include <asm/unaligned.h>
+#include <crypto/sha256_base.h>
+#include <linux/kernel.h>
 #include <linux/module.h>
 #include <linux/string.h>
-#include <crypto/sha2.h>
-#include <asm/unaligned.h>
 
 static const u32 SHA256_K[] = {
         0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
@@ -119,74 +118,40 @@ static void sha256_transform(u32 *state, const u8 *input, u32 *W)
         state[4] += e; state[5] += f; state[6] += g; state[7] += h;
 }
 
-void sha256_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
+static void sha256_transform_blocks(struct sha256_state *sctx,
+                                    const u8 *input, int blocks)
 {
-        unsigned int partial, done;
-        const u8 *src;
         u32 W[64];
 
-        partial = sctx->count & 0x3f;
-        sctx->count += len;
-        done = 0;
-        src = data;
+        do {
+                sha256_transform(sctx->state, input, W);
+                input += SHA256_BLOCK_SIZE;
+        } while (--blocks);
 
-        if ((partial + len) > 63) {
-                if (partial) {
-                        done = -partial;
-                        memcpy(sctx->buf + partial, data, done + 64);
-                        src = sctx->buf;
-                }
+        memzero_explicit(W, sizeof(W));
+}
 
-                do {
-                        sha256_transform(sctx->state, src, W);
-                        done += 64;
-                        src = data + done;
-                } while (done + 63 < len);
-
-                memzero_explicit(W, sizeof(W));
-
-                partial = 0;
-        }
-        memcpy(sctx->buf + partial, src, len - done);
+void sha256_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
+{
+        lib_sha256_base_do_update(sctx, data, len, sha256_transform_blocks);
 }
 EXPORT_SYMBOL(sha256_update);
 
-static void __sha256_final(struct sha256_state *sctx, u8 *out, int digest_words)
+static void __sha256_final(struct sha256_state *sctx, u8 *out, int digest_size)
 {
-        __be32 *dst = (__be32 *)out;
-        __be64 bits;
-        unsigned int index, pad_len;
-        int i;
-        static const u8 padding[64] = { 0x80, };
-
-        /* Save number of bits */
-        bits = cpu_to_be64(sctx->count << 3);
-
-        /* Pad out to 56 mod 64. */
-        index = sctx->count & 0x3f;
-        pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
-        sha256_update(sctx, padding, pad_len);
-
-        /* Append length (before padding) */
-        sha256_update(sctx, (const u8 *)&bits, sizeof(bits));
-
-        /* Store state in digest */
-        for (i = 0; i < digest_words; i++)
-                put_unaligned_be32(sctx->state[i], &dst[i]);
-
-        /* Zeroize sensitive information. */
-        memzero_explicit(sctx, sizeof(*sctx));
+        lib_sha256_base_do_finalize(sctx, sha256_transform_blocks);
+        lib_sha256_base_finish(sctx, out, digest_size);
 }
 
 void sha256_final(struct sha256_state *sctx, u8 *out)
 {
-        __sha256_final(sctx, out, 8);
+        __sha256_final(sctx, out, 32);
 }
 EXPORT_SYMBOL(sha256_final);
 
 void sha224_final(struct sha256_state *sctx, u8 *out)
 {
-        __sha256_final(sctx, out, 7);
+        __sha256_final(sctx, out, 28);
 }
 EXPORT_SYMBOL(sha224_final);
 
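For callers nothing changes: sha256_update() and sha256_final() keep their exported signatures and now simply route through the shared helpers. A hedged sketch of a typical kernel-side caller of this lib/crypto API; demo_hash() is hypothetical and not part of this patch:

/* Hypothetical caller, shown for illustration only. */
#include <crypto/sha2.h>

static void demo_hash(const u8 *data, unsigned int len,
                      u8 digest[SHA256_DIGEST_SIZE])
{
        struct sha256_state state;

        sha256_init(&state);
        sha256_update(&state, data, len);
        sha256_final(&state, digest);
}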