Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto fixes from Herbert Xu:

 - vmalloc stack regression in CCM

 - Build problem in CRC32 on ARM

 - Memory leak in cavium

 - Missing Kconfig dependencies in atmel and mediatek

 - XTS regression on some platforms (s390 and ppc)

 - Memory overrun in CCM test vector

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: vmx - Use skcipher for xts fallback
  crypto: vmx - Use skcipher for cbc fallback
  crypto: testmgr - Pad aes_ccm_enc_tv_template vector
  crypto: arm/crc32 - add build time test for CRC instruction support
  crypto: arm/crc32 - fix build error with outdated binutils
  crypto: ccm - move cbcmac input off the stack
  crypto: xts - Propagate NEED_FALLBACK bit
  crypto: api - Add crypto_requires_off helper
  crypto: atmel - CRYPTO_DEV_MEDIATEK should depend on HAS_DMA
  crypto: atmel - CRYPTO_DEV_ATMEL_TDES and CRYPTO_DEV_ATMEL_SHA should depend on HAS_DMA
  crypto: cavium - fix leak on curr if curr->head fails to be allocated
  crypto: cavium - Fix couple of static checker errors
This commit is contained in:
		
						commit
						33a8b3e99d
					
				| @ -15,7 +15,17 @@ ce-obj-$(CONFIG_CRYPTO_SHA1_ARM_CE) += sha1-arm-ce.o | ||||
| ce-obj-$(CONFIG_CRYPTO_SHA2_ARM_CE) += sha2-arm-ce.o | ||||
| ce-obj-$(CONFIG_CRYPTO_GHASH_ARM_CE) += ghash-arm-ce.o | ||||
| ce-obj-$(CONFIG_CRYPTO_CRCT10DIF_ARM_CE) += crct10dif-arm-ce.o | ||||
| ce-obj-$(CONFIG_CRYPTO_CRC32_ARM_CE) += crc32-arm-ce.o | ||||
| crc-obj-$(CONFIG_CRYPTO_CRC32_ARM_CE) += crc32-arm-ce.o | ||||
| 
 | ||||
| ifneq ($(crc-obj-y)$(crc-obj-m),) | ||||
| ifeq ($(call as-instr,.arch armv8-a\n.arch_extension crc,y,n),y) | ||||
| ce-obj-y += $(crc-obj-y) | ||||
| ce-obj-m += $(crc-obj-m) | ||||
| else | ||||
| $(warning These CRC Extensions modules need binutils 2.23 or higher) | ||||
| $(warning $(crc-obj-y) $(crc-obj-m)) | ||||
| endif | ||||
| endif | ||||
| 
 | ||||
| ifneq ($(ce-obj-y)$(ce-obj-m),) | ||||
| ifeq ($(call as-instr,.fpu crypto-neon-fp-armv8,y,n),y) | ||||
|  | ||||
| @ -135,7 +135,7 @@ ENTRY(crc32c_pmull_le) | ||||
| 	vld1.8		{q3-q4}, [BUF, :128]! | ||||
| 	vmov.i8		qzr, #0 | ||||
| 	vmov.i8		qCONSTANT, #0 | ||||
| 	vmov		dCONSTANTl[0], CRC | ||||
| 	vmov.32		dCONSTANTl[0], CRC | ||||
| 	veor.8		d2, d2, dCONSTANTl | ||||
| 	sub		LEN, LEN, #0x40 | ||||
| 	cmp		LEN, #0x40 | ||||
|  | ||||
| @ -45,6 +45,7 @@ struct crypto_rfc4309_req_ctx { | ||||
| 
 | ||||
| struct crypto_ccm_req_priv_ctx { | ||||
| 	u8 odata[16]; | ||||
| 	u8 idata[16]; | ||||
| 	u8 auth_tag[16]; | ||||
| 	u32 flags; | ||||
| 	struct scatterlist src[3]; | ||||
| @ -183,8 +184,8 @@ static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain, | ||||
| 	AHASH_REQUEST_ON_STACK(ahreq, ctx->mac); | ||||
| 	unsigned int assoclen = req->assoclen; | ||||
| 	struct scatterlist sg[3]; | ||||
| 	u8 odata[16]; | ||||
| 	u8 idata[16]; | ||||
| 	u8 *odata = pctx->odata; | ||||
| 	u8 *idata = pctx->idata; | ||||
| 	int ilen, err; | ||||
| 
 | ||||
| 	/* format control data for input */ | ||||
|  | ||||
| @ -22691,7 +22691,7 @@ static struct aead_testvec aes_ccm_enc_tv_template[] = { | ||||
| 			  "\x09\x75\x9a\x9b\x3c\x9b\x27\x39", | ||||
| 		.klen	= 32, | ||||
| 		.iv	= "\x03\xf9\xd9\x4e\x63\xb5\x3d\x9d" | ||||
| 			  "\x43\xf6\x1e\x50", | ||||
| 			  "\x43\xf6\x1e\x50\0\0\0\0", | ||||
| 		.assoc	= "\x57\xf5\x6b\x8b\x57\x5c\x3d\x3b" | ||||
| 			  "\x13\x02\x01\x0c\x83\x4c\x96\x35" | ||||
| 			  "\x8e\xd6\x39\xcf\x7d\x14\x9b\x94" | ||||
|  | ||||
							
								
								
									
										14
									
								
								crypto/xts.c
									
									
									
									
									
								
							
							
						
						
									
										14
									
								
								crypto/xts.c
									
									
									
									
									
								
							| @ -463,6 +463,7 @@ static int create(struct crypto_template *tmpl, struct rtattr **tb) | ||||
| 	struct xts_instance_ctx *ctx; | ||||
| 	struct skcipher_alg *alg; | ||||
| 	const char *cipher_name; | ||||
| 	u32 mask; | ||||
| 	int err; | ||||
| 
 | ||||
| 	algt = crypto_get_attr_type(tb); | ||||
| @ -483,18 +484,19 @@ static int create(struct crypto_template *tmpl, struct rtattr **tb) | ||||
| 	ctx = skcipher_instance_ctx(inst); | ||||
| 
 | ||||
| 	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst)); | ||||
| 	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, | ||||
| 				   crypto_requires_sync(algt->type, | ||||
| 							algt->mask)); | ||||
| 
 | ||||
| 	mask = crypto_requires_off(algt->type, algt->mask, | ||||
| 				   CRYPTO_ALG_NEED_FALLBACK | | ||||
| 				   CRYPTO_ALG_ASYNC); | ||||
| 
 | ||||
| 	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask); | ||||
| 	if (err == -ENOENT) { | ||||
| 		err = -ENAMETOOLONG; | ||||
| 		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)", | ||||
| 			     cipher_name) >= CRYPTO_MAX_ALG_NAME) | ||||
| 			goto err_free_inst; | ||||
| 
 | ||||
| 		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, | ||||
| 					   crypto_requires_sync(algt->type, | ||||
| 								algt->mask)); | ||||
| 		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask); | ||||
| 	} | ||||
| 
 | ||||
| 	if (err) | ||||
|  | ||||
| @ -459,6 +459,7 @@ config CRYPTO_DEV_ATMEL_AES | ||||
| 
 | ||||
| config CRYPTO_DEV_ATMEL_TDES | ||||
| 	tristate "Support for Atmel DES/TDES hw accelerator" | ||||
| 	depends on HAS_DMA | ||||
| 	depends on ARCH_AT91 || COMPILE_TEST | ||||
| 	select CRYPTO_DES | ||||
| 	select CRYPTO_BLKCIPHER | ||||
| @ -472,6 +473,7 @@ config CRYPTO_DEV_ATMEL_TDES | ||||
| 
 | ||||
| config CRYPTO_DEV_ATMEL_SHA | ||||
| 	tristate "Support for Atmel SHA hw accelerator" | ||||
| 	depends on HAS_DMA | ||||
| 	depends on ARCH_AT91 || COMPILE_TEST | ||||
| 	select CRYPTO_HASH | ||||
| 	help | ||||
| @ -583,6 +585,7 @@ config CRYPTO_DEV_ROCKCHIP | ||||
| 
 | ||||
| config CRYPTO_DEV_MEDIATEK | ||||
| 	tristate "MediaTek's EIP97 Cryptographic Engine driver" | ||||
| 	depends on HAS_DMA | ||||
| 	depends on (ARM && ARCH_MEDIATEK) || COMPILE_TEST | ||||
| 	select CRYPTO_AES | ||||
| 	select CRYPTO_AEAD | ||||
|  | ||||
| @ -242,6 +242,7 @@ static int alloc_command_queues(struct cpt_vf *cptvf, | ||||
| 			if (!curr->head) { | ||||
| 				dev_err(&pdev->dev, "Command Q (%d) chunk (%d) allocation failed\n", | ||||
| 					i, queue->nchunks); | ||||
| 				kfree(curr); | ||||
| 				goto cmd_qfail; | ||||
| 			} | ||||
| 
 | ||||
| @ -815,8 +816,10 @@ static void cptvf_remove(struct pci_dev *pdev) | ||||
| { | ||||
| 	struct cpt_vf *cptvf = pci_get_drvdata(pdev); | ||||
| 
 | ||||
| 	if (!cptvf) | ||||
| 	if (!cptvf) { | ||||
| 		dev_err(&pdev->dev, "Invalid CPT-VF device\n"); | ||||
| 		return; | ||||
| 	} | ||||
| 
 | ||||
| 	/* Convey DOWN to PF */ | ||||
| 	if (cptvf_send_vf_down(cptvf)) { | ||||
|  | ||||
| @ -330,8 +330,8 @@ void do_post_process(struct cpt_vf *cptvf, struct cpt_info_buffer *info) | ||||
| { | ||||
| 	struct pci_dev *pdev = cptvf->pdev; | ||||
| 
 | ||||
| 	if (!info || !cptvf) { | ||||
| 		dev_err(&pdev->dev, "Input params are incorrect for post processing\n"); | ||||
| 	if (!info) { | ||||
| 		dev_err(&pdev->dev, "incorrect cpt_info_buffer for post processing\n"); | ||||
| 		return; | ||||
| 	} | ||||
| 
 | ||||
|  | ||||
| @ -27,11 +27,12 @@ | ||||
| #include <asm/switch_to.h> | ||||
| #include <crypto/aes.h> | ||||
| #include <crypto/scatterwalk.h> | ||||
| #include <crypto/skcipher.h> | ||||
| 
 | ||||
| #include "aesp8-ppc.h" | ||||
| 
 | ||||
| struct p8_aes_cbc_ctx { | ||||
| 	struct crypto_blkcipher *fallback; | ||||
| 	struct crypto_skcipher *fallback; | ||||
| 	struct aes_key enc_key; | ||||
| 	struct aes_key dec_key; | ||||
| }; | ||||
| @ -39,7 +40,7 @@ struct p8_aes_cbc_ctx { | ||||
| static int p8_aes_cbc_init(struct crypto_tfm *tfm) | ||||
| { | ||||
| 	const char *alg; | ||||
| 	struct crypto_blkcipher *fallback; | ||||
| 	struct crypto_skcipher *fallback; | ||||
| 	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm); | ||||
| 
 | ||||
| 	if (!(alg = crypto_tfm_alg_name(tfm))) { | ||||
| @ -47,8 +48,9 @@ static int p8_aes_cbc_init(struct crypto_tfm *tfm) | ||||
| 		return -ENOENT; | ||||
| 	} | ||||
| 
 | ||||
| 	fallback = | ||||
| 	    crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK); | ||||
| 	fallback = crypto_alloc_skcipher(alg, 0, | ||||
| 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK); | ||||
| 
 | ||||
| 	if (IS_ERR(fallback)) { | ||||
| 		printk(KERN_ERR | ||||
| 		       "Failed to allocate transformation for '%s': %ld\n", | ||||
| @ -56,11 +58,12 @@ static int p8_aes_cbc_init(struct crypto_tfm *tfm) | ||||
| 		return PTR_ERR(fallback); | ||||
| 	} | ||||
| 	printk(KERN_INFO "Using '%s' as fallback implementation.\n", | ||||
| 	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback)); | ||||
| 		crypto_skcipher_driver_name(fallback)); | ||||
| 
 | ||||
| 	crypto_blkcipher_set_flags( | ||||
| 
 | ||||
| 	crypto_skcipher_set_flags( | ||||
| 		fallback, | ||||
| 		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm)); | ||||
| 		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm)); | ||||
| 	ctx->fallback = fallback; | ||||
| 
 | ||||
| 	return 0; | ||||
| @ -71,7 +74,7 @@ static void p8_aes_cbc_exit(struct crypto_tfm *tfm) | ||||
| 	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm); | ||||
| 
 | ||||
| 	if (ctx->fallback) { | ||||
| 		crypto_free_blkcipher(ctx->fallback); | ||||
| 		crypto_free_skcipher(ctx->fallback); | ||||
| 		ctx->fallback = NULL; | ||||
| 	} | ||||
| } | ||||
| @ -91,7 +94,7 @@ static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key, | ||||
| 	pagefault_enable(); | ||||
| 	preempt_enable(); | ||||
| 
 | ||||
| 	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen); | ||||
| 	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen); | ||||
| 	return ret; | ||||
| } | ||||
| 
 | ||||
| @ -103,15 +106,14 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc, | ||||
| 	struct blkcipher_walk walk; | ||||
| 	struct p8_aes_cbc_ctx *ctx = | ||||
| 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); | ||||
| 	struct blkcipher_desc fallback_desc = { | ||||
| 		.tfm = ctx->fallback, | ||||
| 		.info = desc->info, | ||||
| 		.flags = desc->flags | ||||
| 	}; | ||||
| 
 | ||||
| 	if (in_interrupt()) { | ||||
| 		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, | ||||
| 					       nbytes); | ||||
| 		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); | ||||
| 		skcipher_request_set_tfm(req, ctx->fallback); | ||||
| 		skcipher_request_set_callback(req, desc->flags, NULL, NULL); | ||||
| 		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); | ||||
| 		ret = crypto_skcipher_encrypt(req); | ||||
| 		skcipher_request_zero(req); | ||||
| 	} else { | ||||
| 		preempt_disable(); | ||||
| 		pagefault_disable(); | ||||
| @ -144,15 +146,14 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc, | ||||
| 	struct blkcipher_walk walk; | ||||
| 	struct p8_aes_cbc_ctx *ctx = | ||||
| 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); | ||||
| 	struct blkcipher_desc fallback_desc = { | ||||
| 		.tfm = ctx->fallback, | ||||
| 		.info = desc->info, | ||||
| 		.flags = desc->flags | ||||
| 	}; | ||||
| 
 | ||||
| 	if (in_interrupt()) { | ||||
| 		ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, | ||||
| 					       nbytes); | ||||
| 		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); | ||||
| 		skcipher_request_set_tfm(req, ctx->fallback); | ||||
| 		skcipher_request_set_callback(req, desc->flags, NULL, NULL); | ||||
| 		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); | ||||
| 		ret = crypto_skcipher_decrypt(req); | ||||
| 		skcipher_request_zero(req); | ||||
| 	} else { | ||||
| 		preempt_disable(); | ||||
| 		pagefault_disable(); | ||||
|  | ||||
| @ -28,11 +28,12 @@ | ||||
| #include <crypto/aes.h> | ||||
| #include <crypto/scatterwalk.h> | ||||
| #include <crypto/xts.h> | ||||
| #include <crypto/skcipher.h> | ||||
| 
 | ||||
| #include "aesp8-ppc.h" | ||||
| 
 | ||||
| struct p8_aes_xts_ctx { | ||||
| 	struct crypto_blkcipher *fallback; | ||||
| 	struct crypto_skcipher *fallback; | ||||
| 	struct aes_key enc_key; | ||||
| 	struct aes_key dec_key; | ||||
| 	struct aes_key tweak_key; | ||||
| @ -41,7 +42,7 @@ struct p8_aes_xts_ctx { | ||||
| static int p8_aes_xts_init(struct crypto_tfm *tfm) | ||||
| { | ||||
| 	const char *alg; | ||||
| 	struct crypto_blkcipher *fallback; | ||||
| 	struct crypto_skcipher *fallback; | ||||
| 	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm); | ||||
| 
 | ||||
| 	if (!(alg = crypto_tfm_alg_name(tfm))) { | ||||
| @ -49,8 +50,8 @@ static int p8_aes_xts_init(struct crypto_tfm *tfm) | ||||
| 		return -ENOENT; | ||||
| 	} | ||||
| 
 | ||||
| 	fallback = | ||||
| 		crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK); | ||||
| 	fallback = crypto_alloc_skcipher(alg, 0, | ||||
| 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK); | ||||
| 	if (IS_ERR(fallback)) { | ||||
| 		printk(KERN_ERR | ||||
| 			"Failed to allocate transformation for '%s': %ld\n", | ||||
| @ -58,11 +59,11 @@ static int p8_aes_xts_init(struct crypto_tfm *tfm) | ||||
| 		return PTR_ERR(fallback); | ||||
| 	} | ||||
| 	printk(KERN_INFO "Using '%s' as fallback implementation.\n", | ||||
| 		crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback)); | ||||
| 		crypto_skcipher_driver_name(fallback)); | ||||
| 
 | ||||
| 	crypto_blkcipher_set_flags( | ||||
| 	crypto_skcipher_set_flags( | ||||
| 		fallback, | ||||
| 		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm)); | ||||
| 		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm)); | ||||
| 	ctx->fallback = fallback; | ||||
| 
 | ||||
| 	return 0; | ||||
| @ -73,7 +74,7 @@ static void p8_aes_xts_exit(struct crypto_tfm *tfm) | ||||
| 	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm); | ||||
| 
 | ||||
| 	if (ctx->fallback) { | ||||
| 		crypto_free_blkcipher(ctx->fallback); | ||||
| 		crypto_free_skcipher(ctx->fallback); | ||||
| 		ctx->fallback = NULL; | ||||
| 	} | ||||
| } | ||||
| @ -98,7 +99,7 @@ static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key, | ||||
| 	pagefault_enable(); | ||||
| 	preempt_enable(); | ||||
| 
 | ||||
| 	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen); | ||||
| 	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen); | ||||
| 	return ret; | ||||
| } | ||||
| 
 | ||||
| @ -113,15 +114,14 @@ static int p8_aes_xts_crypt(struct blkcipher_desc *desc, | ||||
| 	struct blkcipher_walk walk; | ||||
| 	struct p8_aes_xts_ctx *ctx = | ||||
| 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); | ||||
| 	struct blkcipher_desc fallback_desc = { | ||||
| 		.tfm = ctx->fallback, | ||||
| 		.info = desc->info, | ||||
| 		.flags = desc->flags | ||||
| 	}; | ||||
| 
 | ||||
| 	if (in_interrupt()) { | ||||
| 		ret = enc ? crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes) : | ||||
|                             crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes); | ||||
| 		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); | ||||
| 		skcipher_request_set_tfm(req, ctx->fallback); | ||||
| 		skcipher_request_set_callback(req, desc->flags, NULL, NULL); | ||||
| 		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); | ||||
| 		ret = enc? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req); | ||||
| 		skcipher_request_zero(req); | ||||
| 	} else { | ||||
| 		preempt_disable(); | ||||
| 		pagefault_disable(); | ||||
|  | ||||
| @ -360,13 +360,18 @@ static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb, | ||||
| 	return crypto_attr_alg(tb[1], type, mask); | ||||
| } | ||||
| 
 | ||||
| static inline int crypto_requires_off(u32 type, u32 mask, u32 off) | ||||
| { | ||||
| 	return (type ^ off) & mask & off; | ||||
| } | ||||
| 
 | ||||
| /*
 | ||||
|  * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms. | ||||
|  * Otherwise returns zero. | ||||
|  */ | ||||
| static inline int crypto_requires_sync(u32 type, u32 mask) | ||||
| { | ||||
| 	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC; | ||||
| 	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC); | ||||
| } | ||||
| 
 | ||||
| noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size); | ||||
|  | ||||
		Loading…
	
		Reference in New Issue
	
	Block a user