author     Linus Torvalds <torvalds@linux-foundation.org>   2015-08-31 17:38:39 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>   2015-08-31 17:38:39 -0700
commit     d4c90396ed7ef9b4e4d221e008e54be8bea8307f (patch)
tree       5611f1f27eec16edfeb6a3fd73a8ef7dbfd037b4 /arch/arm64
parent     f36fc04e4cdda9e4c72ee504e7dc638f9a168863 (diff)
parent     bf433416e67597ba105ece55b3136557874945db (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
"Here is the crypto update for 4.3:
API:
- the AEAD interface transition is now complete.
- add top-level skcipher interface.
Drivers:
- x86-64 acceleration for chacha20/poly1305.
- add sunxi-ss Allwinner Security System crypto accelerator.
- add RSA algorithm to qat driver.
- add SRIOV support to qat driver.
- add LS1021A support to caam.
- add i.MX6 support to caam"
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (163 commits)
crypto: algif_aead - fix for multiple operations on AF_ALG sockets
crypto: qat - enable legacy VFs
MPI: Fix mpi_read_buffer
crypto: qat - silence a static checker warning
crypto: vmx - Fixing opcode issue
crypto: caam - Use the preferred style for memory allocations
crypto: caam - Propagate the real error code in caam_probe
crypto: caam - Fix the error handling in caam_probe
crypto: caam - fix writing to JQCR_MS when using service interface
crypto: hash - Add AHASH_REQUEST_ON_STACK
crypto: testmgr - Use new skcipher interface
crypto: skcipher - Add top-level skcipher interface
crypto: cmac - allow usage in FIPS mode
crypto: sahara - Use dmam_alloc_coherent
crypto: caam - Add support for LS1021A
crypto: qat - Don't move data inside output buffer
crypto: vmx - Fixing GHASH Key issue on little endian
crypto: vmx - Fixing AES-CTR counter bug
crypto: null - Add missing Kconfig tristate for NULL2
crypto: nx - Add forward declaration for struct crypto_aead
...
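The second API bullet in the pull message, "add top-level skcipher interface", refers to the new crypto_skcipher type that fronts the old blkcipher/ablkcipher code. The sketch below is a hedged illustration rather than code from this merge: the "cbc(aes)" algorithm choice, key handling and block-aligned buffer are assumptions.

/*
 * Minimal sketch of a caller of the new top-level skcipher interface
 * ("crypto: skcipher - Add top-level skcipher interface").  Algorithm
 * name, key handling and buffer layout are illustrative assumptions.
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
                                   const u8 *key, unsigned int keylen,
                                   u8 iv[16])
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        int err;

        /* Ask for a synchronous implementation so no completion is needed. */
        tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        /* len is assumed to be a multiple of the AES block size. */
        sg_init_one(&sg, buf, len);
        skcipher_request_set_callback(req, 0, NULL, NULL);
        skcipher_request_set_crypt(req, &sg, &sg, len, iv);

        err = crypto_skcipher_encrypt(req);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}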
Diffstat (limited to 'arch/arm64')
-rw-r--r--  arch/arm64/crypto/aes-ce-ccm-glue.c | 68
1 file changed, 42 insertions(+), 26 deletions(-)
diff --git a/arch/arm64/crypto/aes-ce-ccm-glue.c b/arch/arm64/crypto/aes-ce-ccm-glue.c
index 3303e8a7b837..f4bf2f2a014c 100644
--- a/arch/arm64/crypto/aes-ce-ccm-glue.c
+++ b/arch/arm64/crypto/aes-ce-ccm-glue.c
@@ -124,7 +124,7 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
 		ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, &macp, ctx->key_enc,
 				     num_rounds(ctx));
 
-	scatterwalk_start(&walk, req->assoc);
+	scatterwalk_start(&walk, req->src);
 
 	do {
 		u32 n = scatterwalk_clamp(&walk, len);
@@ -151,6 +151,10 @@ static int ccm_encrypt(struct aead_request *req)
 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
 	struct blkcipher_desc desc = { .info = req->iv };
 	struct blkcipher_walk walk;
+	struct scatterlist srcbuf[2];
+	struct scatterlist dstbuf[2];
+	struct scatterlist *src;
+	struct scatterlist *dst;
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen;
@@ -168,7 +172,12 @@ static int ccm_encrypt(struct aead_request *req)
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
-	blkcipher_walk_init(&walk, req->dst, req->src, len);
+	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst)
+		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);
+
+	blkcipher_walk_init(&walk, dst, src, len);
 	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
 					     AES_BLOCK_SIZE);
 
@@ -194,7 +203,7 @@ static int ccm_encrypt(struct aead_request *req)
 		return err;
 
 	/* copy authtag to end of dst */
-	scatterwalk_map_and_copy(mac, req->dst, req->cryptlen,
+	scatterwalk_map_and_copy(mac, dst, req->cryptlen,
 				 crypto_aead_authsize(aead), 1);
 
 	return 0;
@@ -207,6 +216,10 @@ static int ccm_decrypt(struct aead_request *req)
 	unsigned int authsize = crypto_aead_authsize(aead);
 	struct blkcipher_desc desc = { .info = req->iv };
 	struct blkcipher_walk walk;
+	struct scatterlist srcbuf[2];
+	struct scatterlist dstbuf[2];
+	struct scatterlist *src;
+	struct scatterlist *dst;
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen - authsize;
@@ -224,7 +237,12 @@ static int ccm_decrypt(struct aead_request *req)
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
-	blkcipher_walk_init(&walk, req->dst, req->src, len);
+	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst)
+		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);
+
+	blkcipher_walk_init(&walk, dst, src, len);
 	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
 					     AES_BLOCK_SIZE);
 
@@ -250,44 +268,42 @@ static int ccm_decrypt(struct aead_request *req)
 		return err;
 
 	/* compare calculated auth tag with the stored one */
-	scatterwalk_map_and_copy(buf, req->src, req->cryptlen - authsize,
+	scatterwalk_map_and_copy(buf, src, req->cryptlen - authsize,
 				 authsize, 0);
 
-	if (memcmp(mac, buf, authsize))
+	if (crypto_memneq(mac, buf, authsize))
 		return -EBADMSG;
 	return 0;
 }
 
-static struct crypto_alg ccm_aes_alg = {
-	.cra_name		= "ccm(aes)",
-	.cra_driver_name	= "ccm-aes-ce",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_AEAD,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_aead_type,
-	.cra_module		= THIS_MODULE,
-	.cra_aead = {
-		.ivsize		= AES_BLOCK_SIZE,
-		.maxauthsize	= AES_BLOCK_SIZE,
-		.setkey		= ccm_setkey,
-		.setauthsize	= ccm_setauthsize,
-		.encrypt	= ccm_encrypt,
-		.decrypt	= ccm_decrypt,
-	}
+static struct aead_alg ccm_aes_alg = {
+	.base = {
+		.cra_name		= "ccm(aes)",
+		.cra_driver_name	= "ccm-aes-ce",
+		.cra_priority		= 300,
+		.cra_blocksize		= 1,
+		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
+		.cra_alignmask		= 7,
+		.cra_module		= THIS_MODULE,
+	},
+	.ivsize		= AES_BLOCK_SIZE,
+	.maxauthsize	= AES_BLOCK_SIZE,
+	.setkey		= ccm_setkey,
+	.setauthsize	= ccm_setauthsize,
+	.encrypt	= ccm_encrypt,
+	.decrypt	= ccm_decrypt,
 };
 
 static int __init aes_mod_init(void)
 {
 	if (!(elf_hwcap & HWCAP_AES))
 		return -ENODEV;
-	return crypto_register_alg(&ccm_aes_alg);
+	return crypto_register_aead(&ccm_aes_alg);
 }
 
 static void __exit aes_mod_exit(void)
 {
-	crypto_unregister_alg(&ccm_aes_alg);
+	crypto_unregister_aead(&ccm_aes_alg);
 }
 
 module_init(aes_mod_init);
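For context on the patch above: under the new AEAD interface the associated data occupies the leading req->assoclen bytes of both req->src and req->dst, which is why the glue code now skips past it with scatterwalk_ffwd() instead of reading the removed req->assoc pointer. The following is a rough, illustrative sketch (not part of this merge) of a caller driving the converted "ccm(aes)" transform through that layout; the buffer layout, key handling and 16-byte tag size are assumptions.

/*
 * Hypothetical caller of "ccm(aes)" via the new AEAD interface.  The
 * associated data and plaintext share one scatterlist, AD first.
 */
#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

static int example_ccm_encrypt(u8 *buf, unsigned int assoclen,
                               unsigned int textlen, const u8 *key,
                               unsigned int keylen, u8 iv[16])
{
        struct crypto_aead *tfm;
        struct aead_request *req;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_aead("ccm(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_aead_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        err = crypto_aead_setauthsize(tfm, 16);
        if (err)
                goto out_free_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        /*
         * One in-place buffer: assoclen bytes of associated data followed by
         * textlen bytes of plaintext, with room for the 16-byte tag at the
         * end.  iv[] is assumed to already be CCM-formatted (flags + nonce).
         */
        sg_init_one(&sg, buf, assoclen + textlen + 16);
        aead_request_set_callback(req, 0, NULL, NULL);
        aead_request_set_ad(req, assoclen);
        aead_request_set_crypt(req, &sg, &sg, textlen, iv);

        err = crypto_aead_encrypt(req);

        aead_request_free(req);
out_free_tfm:
        crypto_free_aead(tfm);
        return err;
}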