author | Herbert Xu <herbert@gondor.apana.org.au> | 2023-09-14 16:28:24 +0800
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2023-09-20 13:15:29 +0800
commit | 31865c4c4db2b742fec6ccbff80483fa3e7ab9b9 (patch)
tree | c16772ec1fe97269b9b5c0267811f86ee035affc /crypto/skcipher.c
parent | b64d143b752932ef483d0ed8d00958f1832dd6bc (diff)
crypto: skcipher - Add lskcipher
Add a new API type, lskcipher, designed to take straight kernel
pointers instead of SG lists. Its relationship to skcipher will
be analogous to that between shash and ahash.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
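For orientation, below is a rough sketch (not part of this patch) of how the new linear-buffer interface might be used by a caller, in contrast to the request/scatterlist-based skcipher API. The lskcipher entry points shown (crypto_alloc_lskcipher(), crypto_lskcipher_setkey(), crypto_lskcipher_encrypt(), crypto_free_lskcipher()) are the ones this series introduces, but the exact signatures used here, the "cbc(aes)" algorithm name, and the in-place buffer handling are illustrative assumptions rather than code taken from the patch.

```c
/*
 * Illustrative sketch only -- not from this patch.  Encrypt a single
 * contiguous kernel buffer in place with an lskcipher transform.
 * Algorithm name, key length and exact signatures are assumptions
 * made for the sake of the example.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/types.h>

static int example_lskcipher_encrypt(const u8 *key, unsigned int keylen,
				     u8 *buf, unsigned int len, u8 *iv)
{
	struct crypto_lskcipher *tfm;
	int err;

	/* No skcipher_request or scatterlist setup is needed. */
	tfm = crypto_alloc_lskcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_lskcipher_setkey(tfm, key, keylen);
	if (!err)
		/* Source and destination are straight kernel pointers. */
		err = crypto_lskcipher_encrypt(tfm, buf, buf, len, iv);

	crypto_free_lskcipher(tfm);
	return err;
}
```

In the shash/ahash analogy, lskcipher plays the synchronous, linear-buffer role, while skcipher remains the scatterlist-based front end; the *_sg glue added by this patch lets an skcipher tfm transparently wrap an underlying lskcipher implementation.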
Diffstat (limited to 'crypto/skcipher.c')
-rw-r--r-- | crypto/skcipher.c | 75
1 file changed, 58 insertions, 17 deletions
diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index 7b275716cf4e..b9496dc8a609 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -24,8 +24,9 @@
 #include <linux/slab.h>
 #include <linux/string.h>
 #include <net/netlink.h>
+#include "skcipher.h"
 
-#include "internal.h"
+#define CRYPTO_ALG_TYPE_SKCIPHER_MASK 0x0000000e
 
 enum {
 	SKCIPHER_WALK_PHYS = 1 << 0,
@@ -43,6 +44,8 @@ struct skcipher_walk_buffer {
 	u8 buffer[];
 };
 
+static const struct crypto_type crypto_skcipher_type;
+
 static int skcipher_walk_next(struct skcipher_walk *walk);
 
 static inline void skcipher_map_src(struct skcipher_walk *walk)
@@ -89,11 +92,7 @@ static inline struct skcipher_alg *__crypto_skcipher_alg(
 static inline struct crypto_istat_cipher *skcipher_get_stat(
 	struct skcipher_alg *alg)
 {
-#ifdef CONFIG_CRYPTO_STATS
-	return &alg->stat;
-#else
-	return NULL;
-#endif
+	return skcipher_get_stat_common(&alg->co);
 }
 
 static inline int crypto_skcipher_errstat(struct skcipher_alg *alg, int err)
@@ -468,6 +467,7 @@ static int skcipher_walk_skcipher(struct skcipher_walk *walk,
 				  struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 
 	walk->total = req->cryptlen;
 	walk->nbytes = 0;
@@ -485,10 +485,14 @@ static int skcipher_walk_skcipher(struct skcipher_walk *walk,
 		       SKCIPHER_WALK_SLEEP : 0;
 
 	walk->blocksize = crypto_skcipher_blocksize(tfm);
-	walk->stride = crypto_skcipher_walksize(tfm);
 	walk->ivsize = crypto_skcipher_ivsize(tfm);
 	walk->alignmask = crypto_skcipher_alignmask(tfm);
 
+	if (alg->co.base.cra_type != &crypto_skcipher_type)
+		walk->stride = alg->co.chunksize;
+	else
+		walk->stride = alg->walksize;
+
 	return skcipher_walk_first(walk);
 }
 
@@ -616,6 +620,11 @@ int crypto_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
 	unsigned long alignmask = crypto_skcipher_alignmask(tfm);
 	int err;
 
+	if (cipher->co.base.cra_type != &crypto_skcipher_type) {
+		err = crypto_lskcipher_setkey_sg(tfm, key, keylen);
+		goto out;
+	}
+
 	if (keylen < cipher->min_keysize || keylen > cipher->max_keysize)
 		return -EINVAL;
 
@@ -624,6 +633,7 @@ int crypto_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
 	else
 		err = cipher->setkey(tfm, key, keylen);
 
+out:
 	if (unlikely(err)) {
 		skcipher_set_needkey(tfm);
 		return err;
@@ -649,6 +659,8 @@ int crypto_skcipher_encrypt(struct skcipher_request *req)
 
 	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
 		ret = -ENOKEY;
+	else if (alg->co.base.cra_type != &crypto_skcipher_type)
+		ret = crypto_lskcipher_encrypt_sg(req);
 	else
 		ret = alg->encrypt(req);
 
@@ -671,6 +683,8 @@ int crypto_skcipher_decrypt(struct skcipher_request *req)
 
 	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
 		ret = -ENOKEY;
+	else if (alg->co.base.cra_type != &crypto_skcipher_type)
+		ret = crypto_lskcipher_decrypt_sg(req);
 	else
 		ret = alg->decrypt(req);
 
@@ -693,6 +707,9 @@ static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
 
 	skcipher_set_needkey(skcipher);
 
+	if (tfm->__crt_alg->cra_type != &crypto_skcipher_type)
+		return crypto_init_lskcipher_ops_sg(tfm);
+
 	if (alg->exit)
 		skcipher->base.exit = crypto_skcipher_exit_tfm;
 
@@ -702,6 +719,14 @@ static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
 	return 0;
 }
 
+static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
+{
+	if (alg->cra_type != &crypto_skcipher_type)
+		return sizeof(struct crypto_lskcipher *);
+
+	return crypto_alg_extsize(alg);
+}
+
 static void crypto_skcipher_free_instance(struct crypto_instance *inst)
 {
 	struct skcipher_instance *skcipher =
@@ -770,7 +795,7 @@ static int __maybe_unused crypto_skcipher_report_stat(
 }
 
 static const struct crypto_type crypto_skcipher_type = {
-	.extsize = crypto_alg_extsize,
+	.extsize = crypto_skcipher_extsize,
 	.init_tfm = crypto_skcipher_init_tfm,
 	.free = crypto_skcipher_free_instance,
 #ifdef CONFIG_PROC_FS
@@ -783,7 +808,7 @@ static const struct crypto_type crypto_skcipher_type = {
 	.report_stat = crypto_skcipher_report_stat,
 #endif
 	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
-	.maskset = CRYPTO_ALG_TYPE_MASK,
+	.maskset = CRYPTO_ALG_TYPE_SKCIPHER_MASK,
 	.type = CRYPTO_ALG_TYPE_SKCIPHER,
 	.tfmsize = offsetof(struct crypto_skcipher, base),
 };
@@ -834,23 +859,18 @@ int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask)
 }
 EXPORT_SYMBOL_GPL(crypto_has_skcipher);
 
-static int skcipher_prepare_alg(struct skcipher_alg *alg)
+int skcipher_prepare_alg_common(struct skcipher_alg_common *alg)
 {
-	struct crypto_istat_cipher *istat = skcipher_get_stat(alg);
+	struct crypto_istat_cipher *istat = skcipher_get_stat_common(alg);
 	struct crypto_alg *base = &alg->base;
 
-	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
-	    alg->walksize > PAGE_SIZE / 8)
+	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8)
 		return -EINVAL;
 
 	if (!alg->chunksize)
 		alg->chunksize = base->cra_blocksize;
-	if (!alg->walksize)
-		alg->walksize = alg->chunksize;
 
-	base->cra_type = &crypto_skcipher_type;
 	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
-	base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
 
 	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
 		memset(istat, 0, sizeof(*istat));
@@ -858,6 +878,27 @@ static int skcipher_prepare_alg(struct skcipher_alg *alg)
 	return 0;
 }
 
+static int skcipher_prepare_alg(struct skcipher_alg *alg)
+{
+	struct crypto_alg *base = &alg->base;
+	int err;
+
+	err = skcipher_prepare_alg_common(&alg->co);
+	if (err)
+		return err;
+
+	if (alg->walksize > PAGE_SIZE / 8)
+		return -EINVAL;
+
+	if (!alg->walksize)
+		alg->walksize = alg->chunksize;
+
+	base->cra_type = &crypto_skcipher_type;
+	base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
+
+	return 0;
+}
+
 int crypto_register_skcipher(struct skcipher_alg *alg)
 {
 	struct crypto_alg *base = &alg->base;