From f2ac72e8268d9559c3114d5a22679f91f80a2238 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 7 Jul 2009 12:30:33 +0800 Subject: crypto: api - Add new template create function This patch introduces the template->create function intended to replace the existing alloc function. The intention is for create to handle the registration directly, whereas currently the caller of alloc has to handle the registration. This allows type-specific code to be run prior to registration. Signed-off-by: Herbert Xu --- include/crypto/algapi.h | 1 + 1 file changed, 1 insertion(+) (limited to 'include/crypto') diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index 010545436efa..ce010a346420 100644 --- a/include/crypto/algapi.h +++ b/include/crypto/algapi.h @@ -52,6 +52,7 @@ struct crypto_template { struct crypto_instance *(*alloc)(struct rtattr **tb); void (*free)(struct crypto_instance *inst); + int (*create)(struct crypto_template *tmpl, struct rtattr **tb); char name[CRYPTO_MAX_ALG_NAME]; }; -- cgit v1.2.3-58-ga151 From 70ec7bb91ad0d6cce84c8e17f8cbb608dda7b18c Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 7 Jul 2009 14:07:37 +0800 Subject: crypto: api - Add crypto_alloc_instance2 This patch adds a new argument to crypto_alloc_instance which sets aside some space before the instance for use by algorithms such as shash that place type-specific data before crypto_alg. For compatibility the function has been renamed so that existing users aren't affected. Signed-off-by: Herbert Xu --- crypto/algapi.c | 37 +++++++++++++++++++++++++++++++------ include/crypto/algapi.h | 2 ++ 2 files changed, 33 insertions(+), 6 deletions(-) (limited to 'include/crypto') diff --git a/crypto/algapi.c b/crypto/algapi.c index 56c62e2858d5..429f1a003b06 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -627,17 +627,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num) } EXPORT_SYMBOL_GPL(crypto_attr_u32); -struct crypto_instance *crypto_alloc_instance(const char *name, - struct crypto_alg *alg) +void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg, + unsigned int head) { struct crypto_instance *inst; - struct crypto_spawn *spawn; + char *p; int err; - inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); - if (!inst) + p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn), + GFP_KERNEL); + if (!p) return ERR_PTR(-ENOMEM); + inst = (void *)(p + head); + err = -ENAMETOOLONG; if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name, alg->cra_name) >= CRYPTO_MAX_ALG_NAME) @@ -647,6 +650,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name, name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) goto err_free_inst; + return p; + +err_free_inst: + kfree(p); + return ERR_PTR(err); +} +EXPORT_SYMBOL_GPL(crypto_alloc_instance2); + +struct crypto_instance *crypto_alloc_instance(const char *name, + struct crypto_alg *alg) +{ + struct crypto_instance *inst; + struct crypto_spawn *spawn; + int err; + + inst = crypto_alloc_instance2(name, alg, 0); + if (IS_ERR(inst)) + goto out; + spawn = crypto_instance_ctx(inst); err = crypto_init_spawn(spawn, alg, inst, CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); @@ -658,7 +680,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name, err_free_inst: kfree(inst); - return ERR_PTR(err); + inst = ERR_PTR(err); + +out: + return inst; } EXPORT_SYMBOL_GPL(crypto_alloc_instance); diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index ce010a346420..99bb29723130 100644 --- a/include/crypto/algapi.h +++ 
b/include/crypto/algapi.h @@ -132,6 +132,8 @@ int crypto_check_attr_type(struct rtattr **tb, u32 type); const char *crypto_attr_alg_name(struct rtattr *rta); struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask); int crypto_attr_u32(struct rtattr *rta, u32 *num); +void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg, + unsigned int head); struct crypto_instance *crypto_alloc_instance(const char *name, struct crypto_alg *alg); -- cgit v1.2.3-58-ga151 From 2e4fddd8e420e8f531a34e7a97f9cdb851a6ad13 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 7 Jul 2009 15:17:12 +0800 Subject: crypto: shash - Add shash_instance This patch adds shash_instance and the associated alloc/free functions. This is meant to be an instance that with a shash algorithm under it. Note that the instance itself doesn't have to be shash. Signed-off-by: Herbert Xu --- crypto/shash.c | 7 +++++++ include/crypto/internal/hash.h | 26 ++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 2ccc8b0076ce..8f9d8831e293 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -502,5 +502,12 @@ int crypto_unregister_shash(struct shash_alg *alg) } EXPORT_SYMBOL_GPL(crypto_unregister_shash); +void shash_free_instance(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(shash_instance(inst)); +} +EXPORT_SYMBOL_GPL(shash_free_instance); + MODULE_LICENSE("GPL"); MODULE_DESCRIPTION("Synchronous cryptographic hash type"); diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 82b70564bcab..44d3bcdce6e2 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -34,6 +34,10 @@ struct crypto_hash_walk { unsigned int flags; }; +struct shash_instance { + struct shash_alg alg; +}; + extern const struct crypto_type crypto_ahash_type; int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err); @@ -46,6 +50,8 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, int crypto_register_shash(struct shash_alg *alg); int crypto_unregister_shash(struct shash_alg *alg); +void shash_free_instance(struct crypto_instance *inst); + static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) { return crypto_tfm_ctx(&tfm->base); @@ -80,5 +86,25 @@ static inline void *crypto_shash_ctx(struct crypto_shash *tfm) return crypto_tfm_ctx(&tfm->base); } +static inline struct crypto_instance *shash_crypto_instance( + struct shash_instance *inst) +{ + return container_of(&inst->alg.base, struct crypto_instance, alg); +} + +static inline struct shash_instance *shash_instance( + struct crypto_instance *inst) +{ + return container_of(__crypto_shash_alg(&inst->alg), + struct shash_instance, alg); +} + +static inline struct shash_instance *shash_alloc_instance( + const char *name, struct crypto_alg *alg) +{ + return crypto_alloc_instance2(name, alg, + sizeof(struct shash_alg) - sizeof(*alg)); +} + #endif /* _CRYPTO_INTERNAL_HASH_H */ -- cgit v1.2.3-58-ga151 From 97eedce1a64a57648ac5e39f03825528c47ba72e Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 15:55:52 +0800 Subject: crypto: api - Add new style spawn support This patch modifies the spawn infrastructure to support new style algorithms like shash. In particular, this means storing the frontend type in the spawn and using crypto_create_tfm to allocate the tfm. 
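For illustration only (not part of the patch), a minimal sketch of how a template could drive the two spawn helpers added below; the example_* names are placeholders and the frontend pointer stands in for a crypto_type such as the shash one:

/* Sketch only: store a frontend in a spawn at instance-creation time,
 * then allocate the transform through that frontend later on.  The
 * spawn is assumed to sit at the start of the instance context. */
static int example_init(struct crypto_instance *inst, struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);

	/* Rejects algorithms whose flags clash with the frontend's mask
	 * and remembers the frontend for crypto_spawn_tfm2(). */
	return crypto_init_spawn2(spawn, alg, inst, frontend);
}

static void *example_grab_tfm(struct crypto_instance *inst)
{
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);

	/* Returns a frontend-specific handle (e.g. a crypto_shash *) or
	 * an ERR_PTR() value; crypto_create_tfm() does the actual work. */
	return crypto_spawn_tfm2(spawn);
}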
Signed-off-by: Herbert Xu --- crypto/algapi.c | 55 ++++++++++++++++++++++++++++++++++++++++++++++--- include/crypto/algapi.h | 6 ++++++ 2 files changed, 58 insertions(+), 3 deletions(-) (limited to 'include/crypto') diff --git a/crypto/algapi.c b/crypto/algapi.c index 429f1a003b06..17a5fff8f777 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -488,6 +488,23 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, } EXPORT_SYMBOL_GPL(crypto_init_spawn); +int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg, + struct crypto_instance *inst, + const struct crypto_type *frontend) +{ + int err = -EINVAL; + + if (frontend && (alg->cra_flags ^ frontend->type) & frontend->maskset) + goto out; + + spawn->frontend = frontend; + err = crypto_init_spawn(spawn, alg, inst, frontend->maskset); + +out: + return err; +} +EXPORT_SYMBOL_GPL(crypto_init_spawn2); + void crypto_drop_spawn(struct crypto_spawn *spawn) { down_write(&crypto_alg_sem); @@ -496,12 +513,10 @@ void crypto_drop_spawn(struct crypto_spawn *spawn) } EXPORT_SYMBOL_GPL(crypto_drop_spawn); -struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, - u32 mask) +static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn) { struct crypto_alg *alg; struct crypto_alg *alg2; - struct crypto_tfm *tfm; down_read(&crypto_alg_sem); alg = spawn->alg; @@ -516,6 +531,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, return ERR_PTR(-EAGAIN); } + return alg; +} + +struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, + u32 mask) +{ + struct crypto_alg *alg; + struct crypto_tfm *tfm; + + alg = crypto_spawn_alg(spawn); + if (IS_ERR(alg)) + return ERR_CAST(alg); + tfm = ERR_PTR(-EINVAL); if (unlikely((alg->cra_flags ^ type) & mask)) goto out_put_alg; @@ -532,6 +560,27 @@ out_put_alg: } EXPORT_SYMBOL_GPL(crypto_spawn_tfm); +void *crypto_spawn_tfm2(struct crypto_spawn *spawn) +{ + struct crypto_alg *alg; + struct crypto_tfm *tfm; + + alg = crypto_spawn_alg(spawn); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + tfm = crypto_create_tfm(alg, spawn->frontend); + if (IS_ERR(tfm)) + goto out_put_alg; + + return tfm; + +out_put_alg: + crypto_mod_put(alg); + return tfm; +} +EXPORT_SYMBOL_GPL(crypto_spawn_tfm2); + int crypto_register_notifier(struct notifier_block *nb) { return blocking_notifier_chain_register(&crypto_chain, nb); diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index 99bb29723130..c9bff92a9e0e 100644 --- a/include/crypto/algapi.h +++ b/include/crypto/algapi.h @@ -61,6 +61,7 @@ struct crypto_spawn { struct list_head list; struct crypto_alg *alg; struct crypto_instance *inst; + const struct crypto_type *frontend; u32 mask; }; @@ -117,9 +118,14 @@ struct crypto_template *crypto_lookup_template(const char *name); int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, struct crypto_instance *inst, u32 mask); +int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg, + struct crypto_instance *inst, + const struct crypto_type *frontend); + void crypto_drop_spawn(struct crypto_spawn *spawn); struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, u32 mask); +void *crypto_spawn_tfm2(struct crypto_spawn *spawn); static inline void crypto_set_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst) -- cgit v1.2.3-58-ga151 From 942969992d86330c9700e2cd9afe8a6bea42df78 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 17:21:37 +0800 Subject: crypto: shash - Add 
spawn support This patch adds the functions needed to create and use shash spawns, i.e., to use shash algorithms in a template. Signed-off-by: Herbert Xu --- crypto/shash.c | 9 +++++++++ include/crypto/internal/hash.h | 14 ++++++++++++++ 2 files changed, 23 insertions(+) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 8f9d8831e293..97f6c8ba103a 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -509,5 +509,14 @@ void shash_free_instance(struct crypto_instance *inst) } EXPORT_SYMBOL_GPL(shash_free_instance); +int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, + struct shash_alg *alg, + struct crypto_instance *inst) +{ + return crypto_init_spawn2(&spawn->base, &alg->base, inst, + &crypto_shash_type); +} +EXPORT_SYMBOL_GPL(crypto_init_shash_spawn); + MODULE_LICENSE("GPL"); MODULE_DESCRIPTION("Synchronous cryptographic hash type"); diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 44d3bcdce6e2..6cc824b79331 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -38,6 +38,10 @@ struct shash_instance { struct shash_alg alg; }; +struct crypto_shash_spawn { + struct crypto_spawn base; +}; + extern const struct crypto_type crypto_ahash_type; int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err); @@ -52,6 +56,10 @@ int crypto_unregister_shash(struct shash_alg *alg); void shash_free_instance(struct crypto_instance *inst); +int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, + struct shash_alg *alg, + struct crypto_instance *inst); + static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) { return crypto_tfm_ctx(&tfm->base); @@ -106,5 +114,11 @@ static inline struct shash_instance *shash_alloc_instance( sizeof(struct shash_alg) - sizeof(*alg)); } +static inline struct crypto_shash *crypto_spawn_shash( + struct crypto_shash_spawn *spawn) +{ + return crypto_spawn_tfm2(&spawn->base); +} + #endif /* _CRYPTO_INTERNAL_HASH_H */ -- cgit v1.2.3-58-ga151 From d06854f0243d91badabaab14503f7f3bb770061d Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 17:53:16 +0800 Subject: crypto: api - Add crypto_attr_alg2 helper This patch adds the helper crypto_attr_alg2 which is similar to crypto_attr_alg but takes an extra frontend argument. This is intended to be used by new style algorithm types such as shash. 
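For illustration only (not part of the patch), a hedged sketch of the intended call pattern for the new helper; example_check is a made-up name and the tb[1] indexing follows the existing template convention:

/* Sketch only: resolve the algorithm named in a template's attribute
 * list through a specific frontend; a NULL frontend keeps the old
 * crypto_attr_alg() behaviour, as the new inline wrapper shows. */
static int example_check(struct rtattr **tb,
			 const struct crypto_type *frontend,
			 u32 type, u32 mask)
{
	struct crypto_alg *alg;

	/* tb[1] conventionally carries the name of the underlying
	 * algorithm; the frontend (for instance the shash type) folds
	 * its own type and mask into the lookup via crypto_find_alg(). */
	alg = crypto_attr_alg2(tb[1], frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	/* ... build the instance around alg ... */

	crypto_mod_put(alg);
	return 0;
}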
Signed-off-by: Herbert Xu --- crypto/algapi.c | 8 +++++--- crypto/api.c | 31 ++++++++++++++++++++++--------- crypto/internal.h | 3 +++ include/crypto/algapi.h | 11 ++++++++++- 4 files changed, 40 insertions(+), 13 deletions(-) (limited to 'include/crypto') diff --git a/crypto/algapi.c b/crypto/algapi.c index 17a5fff8f777..2154815201a7 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -644,7 +644,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta) } EXPORT_SYMBOL_GPL(crypto_attr_alg_name); -struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) +struct crypto_alg *crypto_attr_alg2(struct rtattr *rta, + const struct crypto_type *frontend, + u32 type, u32 mask) { const char *name; int err; @@ -654,9 +656,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) if (IS_ERR(name)) return ERR_PTR(err); - return crypto_alg_mod_lookup(name, type, mask); + return crypto_find_alg(name, frontend, type, mask); } -EXPORT_SYMBOL_GPL(crypto_attr_alg); +EXPORT_SYMBOL_GPL(crypto_attr_alg2); int crypto_attr_u32(struct rtattr *rta, u32 *num) { diff --git a/crypto/api.c b/crypto/api.c index d5944f92b416..ba81221b3bef 100644 --- a/crypto/api.c +++ b/crypto/api.c @@ -503,6 +503,27 @@ out: } EXPORT_SYMBOL_GPL(crypto_create_tfm); +struct crypto_alg *crypto_find_alg(const char *alg_name, + const struct crypto_type *frontend, + u32 type, u32 mask) +{ + struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) = + crypto_alg_mod_lookup; + + if (frontend) { + type &= frontend->maskclear; + mask &= frontend->maskclear; + type |= frontend->type; + mask |= frontend->maskset; + + if (frontend->lookup) + lookup = frontend->lookup; + } + + return lookup(alg_name, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_find_alg); + /* * crypto_alloc_tfm - Locate algorithm and allocate transform * @alg_name: Name of algorithm @@ -526,21 +547,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm); void *crypto_alloc_tfm(const char *alg_name, const struct crypto_type *frontend, u32 type, u32 mask) { - struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask); void *tfm; int err; - type &= frontend->maskclear; - mask &= frontend->maskclear; - type |= frontend->type; - mask |= frontend->maskset; - - lookup = frontend->lookup ?: crypto_alg_mod_lookup; - for (;;) { struct crypto_alg *alg; - alg = lookup(alg_name, type, mask); + alg = crypto_find_alg(alg_name, frontend, type, mask); if (IS_ERR(alg)) { err = PTR_ERR(alg); goto err; diff --git a/crypto/internal.h b/crypto/internal.h index 95baaea21fbc..7efa4d0533ff 100644 --- a/crypto/internal.h +++ b/crypto/internal.h @@ -106,6 +106,9 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, u32 mask); void *crypto_create_tfm(struct crypto_alg *alg, const struct crypto_type *frontend); +struct crypto_alg *crypto_find_alg(const char *alg_name, + const struct crypto_type *frontend, + u32 type, u32 mask); void *crypto_alloc_tfm(const char *alg_name, const struct crypto_type *frontend, u32 type, u32 mask); diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index c9bff92a9e0e..1d15a926041e 100644 --- a/include/crypto/algapi.h +++ b/include/crypto/algapi.h @@ -136,7 +136,16 @@ static inline void crypto_set_spawn(struct crypto_spawn *spawn, struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb); int crypto_check_attr_type(struct rtattr **tb, u32 type); const char *crypto_attr_alg_name(struct rtattr *rta); -struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask); +struct crypto_alg 
*crypto_attr_alg2(struct rtattr *rta, + const struct crypto_type *frontend, + u32 type, u32 mask); + +static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta, + u32 type, u32 mask) +{ + return crypto_attr_alg2(rta, NULL, type, mask); +} + int crypto_attr_u32(struct rtattr *rta, u32 *num); void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg, unsigned int head); -- cgit v1.2.3-58-ga151 From 7d6f56400a695af497a8b7c23ea0ff9c3d9d99f4 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 17:56:28 +0800 Subject: crypto: shash - Add shash_attr_alg2 helper This patch adds the helper shash_attr_alg2 which locates a shash algorithm based on the information in the given attribute. Signed-off-by: Herbert Xu --- crypto/shash.c | 10 ++++++++++ include/crypto/internal/hash.h | 2 ++ 2 files changed, 12 insertions(+) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 97f6c8ba103a..21bcff6be5b0 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -518,5 +518,15 @@ int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, } EXPORT_SYMBOL_GPL(crypto_init_shash_spawn); +struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask); + return IS_ERR(alg) ? ERR_CAST(alg) : + container_of(alg, struct shash_alg, base); +} +EXPORT_SYMBOL_GPL(shash_attr_alg); + MODULE_LICENSE("GPL"); MODULE_DESCRIPTION("Synchronous cryptographic hash type"); diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 6cc824b79331..e2ab35a74c62 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -60,6 +60,8 @@ int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, struct shash_alg *alg, struct crypto_instance *inst); +struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); + static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) { return crypto_tfm_ctx(&tfm->base); -- cgit v1.2.3-58-ga151 From 619a6ebd2547f3a8ec2fbc5245daaa1f2056eb32 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 18:46:23 +0800 Subject: crypto: shash - Add shash_register_instance This patch adds shash_register_instance so that shash instances can be registered without bypassing the shash checks applied to normal algorithms. 
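For illustration only (not part of the patch), a sketch of the rough shape a template's ->create() hook can take once shash instances can be registered this way; "example" is a placeholder template name, the inst->alg fields are elided, and a real template would also point its ->free hook at shash_free_instance():

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct shash_alg *alg;
	int err;

	alg = shash_attr_alg(tb[1], 0, 0);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("example", &alg->base);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	/* The spawn lives in the instance context set aside by
	 * crypto_alloc_instance2(). */
	err = crypto_init_shash_spawn(
		crypto_instance_ctx(shash_crypto_instance(inst)), alg,
		shash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	/* ... fill in inst->alg: digestsize, descsize, init/update/final,
	 * base.cra_blocksize, base.cra_ctxsize and so on ... */

	err = shash_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		shash_free_instance(shash_crypto_instance(inst));
	}

out_put_alg:
	crypto_mod_put(&alg->base);
	return err;
}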
Signed-off-by: Herbert Xu --- crypto/shash.c | 26 +++++++++++++++++++++++++- include/crypto/internal/hash.h | 3 ++- 2 files changed, 27 insertions(+), 2 deletions(-) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 21bcff6be5b0..3d242425d692 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -480,7 +480,7 @@ struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type, } EXPORT_SYMBOL_GPL(crypto_alloc_shash); -int crypto_register_shash(struct shash_alg *alg) +static int shash_prepare_alg(struct shash_alg *alg) { struct crypto_alg *base = &alg->base; @@ -491,6 +491,17 @@ int crypto_register_shash(struct shash_alg *alg) base->cra_type = &crypto_shash_type; base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; base->cra_flags |= CRYPTO_ALG_TYPE_SHASH; + return 0; +} + +int crypto_register_shash(struct shash_alg *alg) +{ + struct crypto_alg *base = &alg->base; + int err; + + err = shash_prepare_alg(alg); + if (err) + return err; return crypto_register_alg(base); } @@ -502,6 +513,19 @@ int crypto_unregister_shash(struct shash_alg *alg) } EXPORT_SYMBOL_GPL(crypto_unregister_shash); +int shash_register_instance(struct crypto_template *tmpl, + struct shash_instance *inst) +{ + int err; + + err = shash_prepare_alg(&inst->alg); + if (err) + return err; + + return crypto_register_instance(tmpl, shash_crypto_instance(inst)); +} +EXPORT_SYMBOL_GPL(shash_register_instance); + void shash_free_instance(struct crypto_instance *inst) { crypto_drop_spawn(crypto_instance_ctx(inst)); diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index e2ab35a74c62..fa5c9fb7ce5a 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -53,7 +53,8 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, int crypto_register_shash(struct shash_alg *alg); int crypto_unregister_shash(struct shash_alg *alg); - +int shash_register_instance(struct crypto_template *tmpl, + struct shash_instance *inst); void shash_free_instance(struct crypto_instance *inst); int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, -- cgit v1.2.3-58-ga151 From cde6263fa954dfc03ebe169aa3f7f71176d7901b Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 22:32:07 +0800 Subject: crypto: shash - Add crypto_shash_ctx_aligned This patch adds crypto_shash_ctx_aligned which will be needed by hmac after its conversion to shash. Signed-off-by: Herbert Xu --- include/crypto/internal/hash.h | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'include/crypto') diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index fa5c9fb7ce5a..f1041140d3d9 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -123,5 +123,10 @@ static inline struct crypto_shash *crypto_spawn_shash( return crypto_spawn_tfm2(&spawn->base); } +static inline void *crypto_shash_ctx_aligned(struct crypto_shash *tfm) +{ + return crypto_tfm_ctx_aligned(&tfm->base); +} + #endif /* _CRYPTO_INTERNAL_HASH_H */ -- cgit v1.2.3-58-ga151 From 0390e6aecf571ddac934e6c0644bb4038167b698 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 8 Jul 2009 22:36:36 +0800 Subject: crypto: shash - Add __crypto_shash_cast This patch adds __crypto_shash_cast which turns a crypto_tfm into crypto_shash. It's analogous to the other __crypto_*_cast functions. It hasn't been needed until now since no existing shash algorithms have had an init function. 
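For illustration only (not part of the patch), a small sketch of the situation the cast is for; struct example_ctx and its field are hypothetical:

/* Sketch only: a cra_init hook receives a bare crypto_tfm, so the new
 * cast is what lets it reach the transform through the shash
 * interface and its context accessors. */
struct example_ctx {
	unsigned int rounds;		/* hypothetical per-transform state */
};

static int example_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct example_ctx *ctx = crypto_shash_ctx(hash);

	ctx->rounds = 10;		/* arbitrary illustrative default */
	return 0;
}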
Signed-off-by: Herbert Xu --- include/crypto/internal/hash.h | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'include/crypto') diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index f1041140d3d9..3af34ca8e8bc 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -128,5 +128,10 @@ static inline void *crypto_shash_ctx_aligned(struct crypto_shash *tfm) return crypto_tfm_ctx_aligned(&tfm->base); } +static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm) +{ + return (struct crypto_shash *)tfm; +} + #endif /* _CRYPTO_INTERNAL_HASH_H */ -- cgit v1.2.3-58-ga151 From ef5d590c9b9a00b017683006fe4cf959b9532336 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Thu, 9 Jul 2009 11:32:55 +0800 Subject: crypto: shash - Add shash_instance_ctx This patch adds the helper shash_instance_ctx which is the shash analogue of crypto_instance_ctx. Signed-off-by: Herbert Xu --- include/crypto/internal/hash.h | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'include/crypto') diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 3af34ca8e8bc..069b93e1a8ec 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -110,6 +110,11 @@ static inline struct shash_instance *shash_instance( struct shash_instance, alg); } +static inline void *shash_instance_ctx(struct shash_instance *inst) +{ + return crypto_instance_ctx(shash_crypto_instance(inst)); +} + static inline struct shash_instance *shash_alloc_instance( const char *name, struct crypto_alg *alg) { -- cgit v1.2.3-58-ga151 From 99d27e1c59e34869605de625b033c52163f5bfa7 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Thu, 9 Jul 2009 20:30:57 +0800 Subject: crypto: shash - Export/import hash state only This patch replaces the full descriptor export with an export of the partial hash state. This allows the use of a consistent export format across all implementations of a given algorithm. This is useful because a number of cases require the use of the partial hash state, e.g., PadLock can use the SHA1 hash state to get around the fact that it can only hash contiguous data chunks. 
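For illustration only (not part of the patch), a sketch of what the new export/import contract looks like from a caller's side; descriptor setup is omitted and the buffer is sized from the new statesize attribute rather than descsize:

static int example_checkpoint(struct shash_desc *src, struct shash_desc *dst)
{
	void *state;
	int err;

	state = kmalloc(crypto_shash_statesize(src->tfm), GFP_KERNEL);
	if (!state)
		return -ENOMEM;

	/* Returns -ENOSYS if the algorithm supplies no export hook. */
	err = crypto_shash_export(src, state);
	if (!err)
		err = crypto_shash_import(dst, state);

	kzfree(state);			/* the partial state may be sensitive */
	return err;
}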
Signed-off-by: Herbert Xu --- crypto/shash.c | 25 ++++++++++++++----------- include/crypto/hash.h | 18 ++++++++++++++---- 2 files changed, 28 insertions(+), 15 deletions(-) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 23e05a1e9038..14a3b707a31f 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -172,19 +172,15 @@ int crypto_shash_digest(struct shash_desc *desc, const u8 *data, } EXPORT_SYMBOL_GPL(crypto_shash_digest); -int crypto_shash_import(struct shash_desc *desc, const u8 *in) +static int shash_no_export(struct shash_desc *desc, void *out) { - struct crypto_shash *tfm = desc->tfm; - struct shash_alg *alg = crypto_shash_alg(tfm); - - memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm)); - - if (alg->reinit) - return alg->reinit(desc); + return -ENOSYS; +} - return 0; +static int shash_no_import(struct shash_desc *desc, const void *in) +{ + return -ENOSYS; } -EXPORT_SYMBOL_GPL(crypto_shash_import); static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen) @@ -484,12 +480,19 @@ static int shash_prepare_alg(struct shash_alg *alg) struct crypto_alg *base = &alg->base; if (alg->digestsize > PAGE_SIZE / 8 || - alg->descsize > PAGE_SIZE / 8) + alg->descsize > PAGE_SIZE / 8 || + alg->statesize > PAGE_SIZE / 8) return -EINVAL; base->cra_type = &crypto_shash_type; base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; base->cra_flags |= CRYPTO_ALG_TYPE_SHASH; + + if (!alg->import) + alg->import = shash_no_import; + if (!alg->export) + alg->export = shash_no_export; + return 0; } diff --git a/include/crypto/hash.h b/include/crypto/hash.h index d56bb71617c3..3c4cce6a425c 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -24,7 +24,6 @@ struct shash_desc { struct shash_alg { int (*init)(struct shash_desc *desc); - int (*reinit)(struct shash_desc *desc); int (*update)(struct shash_desc *desc, const u8 *data, unsigned int len); int (*final)(struct shash_desc *desc, u8 *out); @@ -32,11 +31,14 @@ struct shash_alg { unsigned int len, u8 *out); int (*digest)(struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out); + int (*export)(struct shash_desc *desc, void *out); + int (*import)(struct shash_desc *desc, const void *in); int (*setkey)(struct crypto_shash *tfm, const u8 *key, unsigned int keylen); unsigned int descsize; unsigned int digestsize; + unsigned int statesize; struct crypto_alg base; }; @@ -251,6 +253,11 @@ static inline unsigned int crypto_shash_digestsize(struct crypto_shash *tfm) return crypto_shash_alg(tfm)->digestsize; } +static inline unsigned int crypto_shash_statesize(struct crypto_shash *tfm) +{ + return crypto_shash_alg(tfm)->statesize; +} + static inline u32 crypto_shash_get_flags(struct crypto_shash *tfm) { return crypto_tfm_get_flags(crypto_shash_tfm(tfm)); @@ -281,12 +288,15 @@ int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key, int crypto_shash_digest(struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out); -static inline void crypto_shash_export(struct shash_desc *desc, u8 *out) +static inline int crypto_shash_export(struct shash_desc *desc, void *out) { - memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm)); + return crypto_shash_alg(desc->tfm)->export(desc, out); } -int crypto_shash_import(struct shash_desc *desc, const u8 *in); +static inline int crypto_shash_import(struct shash_desc *desc, const void *in) +{ + return crypto_shash_alg(desc->tfm)->import(desc, in); +} static inline int crypto_shash_init(struct shash_desc *desc) { -- cgit 
v1.2.3-58-ga151 From e2a7ce4e185a94462698cc0e5192495ee3d22a2f Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Thu, 9 Jul 2009 21:27:13 +0800 Subject: crypto: sha1_generic - Add export/import support This patch adds export/import support to sha1_generic. The exported type is defined by struct sha1_state, which is basically the entire descriptor state of sha1_generic. Signed-off-by: Herbert Xu --- crypto/sha1_generic.c | 41 +++++++++++++++++++++++++---------------- include/crypto/sha.h | 8 ++++++++ 2 files changed, 33 insertions(+), 16 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c index 9efef20454cb..0416091bf45a 100644 --- a/crypto/sha1_generic.c +++ b/crypto/sha1_generic.c @@ -25,31 +25,21 @@ #include #include -struct sha1_ctx { - u64 count; - u32 state[5]; - u8 buffer[64]; -}; - static int sha1_init(struct shash_desc *desc) { - struct sha1_ctx *sctx = shash_desc_ctx(desc); + struct sha1_state *sctx = shash_desc_ctx(desc); - static const struct sha1_ctx initstate = { - 0, - { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, - { 0, } + *sctx = (struct sha1_state){ + .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, }; - *sctx = initstate; - return 0; } static int sha1_update(struct shash_desc *desc, const u8 *data, unsigned int len) { - struct sha1_ctx *sctx = shash_desc_ctx(desc); + struct sha1_state *sctx = shash_desc_ctx(desc); unsigned int partial, done; const u8 *src; @@ -85,7 +75,7 @@ static int sha1_update(struct shash_desc *desc, const u8 *data, /* Add padding and return the message digest. */ static int sha1_final(struct shash_desc *desc, u8 *out) { - struct sha1_ctx *sctx = shash_desc_ctx(desc); + struct sha1_state *sctx = shash_desc_ctx(desc); __be32 *dst = (__be32 *)out; u32 i, index, padlen; __be64 bits; @@ -111,12 +101,31 @@ static int sha1_final(struct shash_desc *desc, u8 *out) return 0; } +static int sha1_export(struct shash_desc *desc, void *out) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + + memcpy(out, sctx, sizeof(*sctx)); + return 0; +} + +static int sha1_import(struct shash_desc *desc, const void *in) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + + memcpy(sctx, in, sizeof(*sctx)); + return 0; +} + static struct shash_alg alg = { .digestsize = SHA1_DIGEST_SIZE, .init = sha1_init, .update = sha1_update, .final = sha1_final, - .descsize = sizeof(struct sha1_ctx), + .export = sha1_export, + .import = sha1_import, + .descsize = sizeof(struct sha1_state), + .statesize = sizeof(struct sha1_state), .base = { .cra_name = "sha1", .cra_driver_name= "sha1-generic", diff --git a/include/crypto/sha.h b/include/crypto/sha.h index c0ccc2b1a2d8..922a248bd04d 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -5,6 +5,8 @@ #ifndef _CRYPTO_SHA_H #define _CRYPTO_SHA_H +#include + #define SHA1_DIGEST_SIZE 20 #define SHA1_BLOCK_SIZE 64 @@ -62,4 +64,10 @@ #define SHA512_H6 0x1f83d9abfb41bd6bULL #define SHA512_H7 0x5be0cd19137e2179ULL +struct sha1_state { + u64 count; + u32 state[SHA1_DIGEST_SIZE / 4]; + u8 buffer[SHA1_BLOCK_SIZE]; +}; + #endif -- cgit v1.2.3-58-ga151 From 9b2fda7b94a769af13c24582739e50664b0e27a8 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Fri, 10 Jul 2009 13:00:27 +0800 Subject: crypto: sha256_generic - Add export/import support This patch adds export/import support to sha256_generic. The exported type is defined by struct sha256_state, which is basically the entire descriptor state of sha256_generic. 
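For illustration only (not part of the patch), a sketch of what the public state struct buys: a partial SHA-256 computation can be carried between descriptors, and eventually between implementations that agree on struct sha256_state, in a plain stack variable. Descriptor setup is omitted:

static int example_handover(struct shash_desc *from, struct shash_desc *to)
{
	struct sha256_state state;
	int err;

	err = crypto_shash_export(from, &state);
	if (err)
		return err;

	return crypto_shash_import(to, &state);
}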
Signed-off-by: Herbert Xu --- crypto/sha256_generic.c | 37 +++++++++++++++++++++++++------------ include/crypto/sha.h | 6 ++++++ 2 files changed, 31 insertions(+), 12 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c index e58c71bdf333..c48459ebf05b 100644 --- a/crypto/sha256_generic.c +++ b/crypto/sha256_generic.c @@ -25,12 +25,6 @@ #include #include -struct sha256_ctx { - u64 count; - u32 state[8]; - u8 buf[128]; -}; - static inline u32 Ch(u32 x, u32 y, u32 z) { return z ^ (x & (y ^ z)); @@ -222,7 +216,7 @@ static void sha256_transform(u32 *state, const u8 *input) static int sha224_init(struct shash_desc *desc) { - struct sha256_ctx *sctx = shash_desc_ctx(desc); + struct sha256_state *sctx = shash_desc_ctx(desc); sctx->state[0] = SHA224_H0; sctx->state[1] = SHA224_H1; sctx->state[2] = SHA224_H2; @@ -238,7 +232,7 @@ static int sha224_init(struct shash_desc *desc) static int sha256_init(struct shash_desc *desc) { - struct sha256_ctx *sctx = shash_desc_ctx(desc); + struct sha256_state *sctx = shash_desc_ctx(desc); sctx->state[0] = SHA256_H0; sctx->state[1] = SHA256_H1; sctx->state[2] = SHA256_H2; @@ -255,7 +249,7 @@ static int sha256_init(struct shash_desc *desc) static int sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len) { - struct sha256_ctx *sctx = shash_desc_ctx(desc); + struct sha256_state *sctx = shash_desc_ctx(desc); unsigned int partial, done; const u8 *src; @@ -286,7 +280,7 @@ static int sha256_update(struct shash_desc *desc, const u8 *data, static int sha256_final(struct shash_desc *desc, u8 *out) { - struct sha256_ctx *sctx = shash_desc_ctx(desc); + struct sha256_state *sctx = shash_desc_ctx(desc); __be32 *dst = (__be32 *)out; __be64 bits; unsigned int index, pad_len; @@ -326,12 +320,31 @@ static int sha224_final(struct shash_desc *desc, u8 *hash) return 0; } +static int sha256_export(struct shash_desc *desc, void *out) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + + memcpy(out, sctx, sizeof(*sctx)); + return 0; +} + +static int sha256_import(struct shash_desc *desc, const void *in) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + + memcpy(sctx, in, sizeof(*sctx)); + return 0; +} + static struct shash_alg sha256 = { .digestsize = SHA256_DIGEST_SIZE, .init = sha256_init, .update = sha256_update, .final = sha256_final, - .descsize = sizeof(struct sha256_ctx), + .export = sha256_export, + .import = sha256_import, + .descsize = sizeof(struct sha256_state), + .statesize = sizeof(struct sha256_state), .base = { .cra_name = "sha256", .cra_driver_name= "sha256-generic", @@ -346,7 +359,7 @@ static struct shash_alg sha224 = { .init = sha224_init, .update = sha256_update, .final = sha224_final, - .descsize = sizeof(struct sha256_ctx), + .descsize = sizeof(struct sha256_state), .base = { .cra_name = "sha224", .cra_driver_name= "sha224-generic", diff --git a/include/crypto/sha.h b/include/crypto/sha.h index 922a248bd04d..88ef5eb9514d 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -70,4 +70,10 @@ struct sha1_state { u8 buffer[SHA1_BLOCK_SIZE]; }; +struct sha256_state { + u64 count; + u32 state[SHA256_DIGEST_SIZE / 4]; + u8 buf[SHA256_BLOCK_SIZE]; +}; + #endif -- cgit v1.2.3-58-ga151 From aef73cfcb913eae3d0deeb60eb385f75039db40b Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sat, 11 Jul 2009 22:22:14 +0800 Subject: crypto: async - Use kzfree for requests This patch changes the kfree call to kzfree for async requests. 
As the request may contain sensitive data it needs to be zeroed before it can be reallocated by others. Signed-off-by: Herbert Xu --- include/crypto/hash.h | 2 +- include/linux/crypto.h | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'include/crypto') diff --git a/include/crypto/hash.h b/include/crypto/hash.h index 3c4cce6a425c..f74214a4b01b 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -186,7 +186,7 @@ static inline struct ahash_request *ahash_request_alloc( static inline void ahash_request_free(struct ahash_request *req) { - kfree(req); + kzfree(req); } static inline struct ahash_request *ahash_request_cast( diff --git a/include/linux/crypto.h b/include/linux/crypto.h index ec29fa268b94..274f9c7da90c 100644 --- a/include/linux/crypto.h +++ b/include/linux/crypto.h @@ -770,7 +770,7 @@ static inline struct ablkcipher_request *ablkcipher_request_alloc( static inline void ablkcipher_request_free(struct ablkcipher_request *req) { - kfree(req); + kzfree(req); } static inline void ablkcipher_request_set_callback( @@ -901,7 +901,7 @@ static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm, static inline void aead_request_free(struct aead_request *req) { - kfree(req); + kzfree(req); } static inline void aead_request_set_callback(struct aead_request *req, -- cgit v1.2.3-58-ga151 From 113adefc73c291f93f875fe515a46d8f76252fff Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 12:50:12 +0800 Subject: crypto: shash - Make descsize a run-time attribute This patch changes descsize to a run-time attribute so that implementations can change it in their init functions. Signed-off-by: Herbert Xu --- crypto/shash.c | 39 ++++++++++++++++++++++++++++----------- include/crypto/hash.h | 3 ++- include/crypto/internal/hash.h | 2 +- 3 files changed, 31 insertions(+), 13 deletions(-) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 131e14d2b572..d8e0f8f8d672 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -300,14 +300,16 @@ static int crypto_init_shash_ops_async(struct crypto_tfm *tfm) static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key, unsigned int keylen) { - struct shash_desc *desc = crypto_hash_ctx(tfm); + struct shash_desc **descp = crypto_hash_ctx(tfm); + struct shash_desc *desc = *descp; return crypto_shash_setkey(desc->tfm, key, keylen); } static int shash_compat_init(struct hash_desc *hdesc) { - struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm); + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + struct shash_desc *desc = *descp; desc->flags = hdesc->flags; @@ -317,7 +319,8 @@ static int shash_compat_init(struct hash_desc *hdesc) static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg, unsigned int len) { - struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm); + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + struct shash_desc *desc = *descp; struct crypto_hash_walk walk; int nbytes; @@ -330,7 +333,9 @@ static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg, static int shash_compat_final(struct hash_desc *hdesc, u8 *out) { - return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out); + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + + return crypto_shash_final(*descp, out); } static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg, @@ -340,7 +345,8 @@ static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg, int err; if (nbytes < min(sg->length, 
((unsigned int)(PAGE_SIZE)) - offset)) { - struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm); + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + struct shash_desc *desc = *descp; void *data; desc->flags = hdesc->flags; @@ -368,9 +374,11 @@ out: static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm) { - struct shash_desc *desc= crypto_tfm_ctx(tfm); + struct shash_desc **descp = crypto_tfm_ctx(tfm); + struct shash_desc *desc = *descp; crypto_free_shash(desc->tfm); + kzfree(desc); } static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm) @@ -378,8 +386,9 @@ static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm) struct hash_tfm *crt = &tfm->crt_hash; struct crypto_alg *calg = tfm->__crt_alg; struct shash_alg *alg = __crypto_shash_alg(calg); - struct shash_desc *desc = crypto_tfm_ctx(tfm); + struct shash_desc **descp = crypto_tfm_ctx(tfm); struct crypto_shash *shash; + struct shash_desc *desc; if (!crypto_mod_get(calg)) return -EAGAIN; @@ -390,6 +399,14 @@ static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm) return PTR_ERR(shash); } + desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash), + GFP_KERNEL); + if (!desc) { + crypto_free_shash(shash); + return -ENOMEM; + } + + *descp = desc; desc->tfm = shash; tfm->exit = crypto_exit_shash_ops_compat; @@ -419,11 +436,9 @@ static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) { - struct shash_alg *salg = __crypto_shash_alg(alg); - switch (mask & CRYPTO_ALG_TYPE_MASK) { case CRYPTO_ALG_TYPE_HASH_MASK: - return sizeof(struct shash_desc) + salg->descsize; + return sizeof(struct shash_desc *); case CRYPTO_ALG_TYPE_AHASH_MASK: return sizeof(struct crypto_shash *); } @@ -434,6 +449,9 @@ static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type, static int crypto_shash_init_tfm(struct crypto_tfm *tfm, const struct crypto_type *frontend) { + struct crypto_shash *hash = __crypto_shash_cast(tfm); + + hash->descsize = crypto_shash_alg(hash)->descsize; return 0; } @@ -452,7 +470,6 @@ static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) seq_printf(m, "type : shash\n"); seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); seq_printf(m, "digestsize : %u\n", salg->digestsize); - seq_printf(m, "descsize : %u\n", salg->descsize); } static const struct crypto_type crypto_shash_type = { diff --git a/include/crypto/hash.h b/include/crypto/hash.h index f74214a4b01b..fcc02d978231 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -48,6 +48,7 @@ struct crypto_ahash { }; struct crypto_shash { + unsigned int descsize; struct crypto_tfm base; }; @@ -275,7 +276,7 @@ static inline void crypto_shash_clear_flags(struct crypto_shash *tfm, u32 flags) static inline unsigned int crypto_shash_descsize(struct crypto_shash *tfm) { - return crypto_shash_alg(tfm)->descsize; + return tfm->descsize; } static inline void *shash_desc_ctx(struct shash_desc *desc) diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 069b93e1a8ec..2d1b10f23c91 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -135,7 +135,7 @@ static inline void *crypto_shash_ctx_aligned(struct crypto_shash *tfm) static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm) { - return (struct crypto_shash *)tfm; + return container_of(tfm, struct crypto_shash, base); } #endif /* _CRYPTO_INTERNAL_HASH_H */ -- cgit v1.2.3-58-ga151 From 
7eddf95ec5440d60f10963f453e27f82f394044e Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sun, 12 Jul 2009 21:25:20 +0800 Subject: crypto: shash - Export async functions This patch exports the async functions so that they can be reused by cryptd when it switches over to using shash. Signed-off-by: Herbert Xu --- crypto/shash.c | 42 ++++++++++++++++++++++-------------------- include/crypto/internal/hash.h | 3 +++ 2 files changed, 25 insertions(+), 20 deletions(-) (limited to 'include/crypto') diff --git a/crypto/shash.c b/crypto/shash.c index d8e0f8f8d672..3b1b06b3dcf3 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -202,9 +202,8 @@ static int shash_async_init(struct ahash_request *req) return crypto_shash_init(desc); } -static int shash_async_update(struct ahash_request *req) +int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc) { - struct shash_desc *desc = ahash_request_ctx(req); struct crypto_hash_walk walk; int nbytes; @@ -214,13 +213,19 @@ static int shash_async_update(struct ahash_request *req) return nbytes; } +EXPORT_SYMBOL_GPL(shash_ahash_update); + +static int shash_async_update(struct ahash_request *req) +{ + return shash_ahash_update(req, ahash_request_ctx(req)); +} static int shash_async_final(struct ahash_request *req) { return crypto_shash_final(ahash_request_ctx(req), req->result); } -static int shash_async_digest(struct ahash_request *req) +int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) { struct scatterlist *sg = req->src; unsigned int offset = sg->offset; @@ -228,34 +233,31 @@ static int shash_async_digest(struct ahash_request *req) int err; if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { - struct crypto_shash **ctx = - crypto_ahash_ctx(crypto_ahash_reqtfm(req)); - struct shash_desc *desc = ahash_request_ctx(req); void *data; - desc->tfm = *ctx; - desc->flags = req->base.flags; - data = crypto_kmap(sg_page(sg), 0); err = crypto_shash_digest(desc, data + offset, nbytes, req->result); crypto_kunmap(data, 0); crypto_yield(desc->flags); - goto out; - } + } else + err = crypto_shash_init(desc) ?: + shash_ahash_update(req, desc) ?: + crypto_shash_final(desc, req->result); - err = shash_async_init(req); - if (err) - goto out; + return err; +} +EXPORT_SYMBOL_GPL(shash_ahash_digest); - err = shash_async_update(req); - if (err) - goto out; +static int shash_async_digest(struct ahash_request *req) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); + struct shash_desc *desc = ahash_request_ctx(req); - err = shash_async_final(req); + desc->tfm = *ctx; + desc->flags = req->base.flags; -out: - return err; + return shash_ahash_digest(req, desc); } static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 2d1b10f23c91..6e283495374e 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -63,6 +63,9 @@ int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); +int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc); +int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc); + static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) { return crypto_tfm_ctx(&tfm->base); -- cgit v1.2.3-58-ga151 From fc00127fb67b2a7d2b66f0f4096a5367b581f045 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sun, 12 Jul 2009 23:05:48 +0800 Subject: crypto: ahash - Add 
crypto_ahash_set_reqsize This patch adds the helper crypto_ahash_set_reqsize so that implementations do not directly access the crypto_ahash structure. Signed-off-by: Herbert Xu --- include/crypto/internal/hash.h | 6 ++++++ 1 file changed, 6 insertions(+) (limited to 'include/crypto') diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 6e283495374e..5e45818f3351 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -77,6 +77,12 @@ static inline struct ahash_alg *crypto_ahash_alg( return &crypto_ahash_tfm(tfm)->__crt_alg->cra_ahash; } +static inline void crypto_ahash_set_reqsize(struct crypto_ahash *tfm, + unsigned int reqsize) +{ + crypto_ahash_crt(tfm)->reqsize = reqsize; +} + static inline int ahash_enqueue_request(struct crypto_queue *queue, struct ahash_request *request) { -- cgit v1.2.3-58-ga151 From 2ca33da1dea3ba53d1425226a6bac073c5e8568c Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Mon, 13 Jul 2009 20:46:25 +0800 Subject: crypto: api - Remove frontend argument from extsize/init_tfm As the extsize and init_tfm functions belong to the frontend the frontend argument is superfluous. Signed-off-by: Herbert Xu --- crypto/api.c | 4 ++-- crypto/pcompress.c | 6 ++---- crypto/shash.c | 6 ++---- include/crypto/algapi.h | 6 ++---- 4 files changed, 8 insertions(+), 14 deletions(-) (limited to 'include/crypto') diff --git a/crypto/api.c b/crypto/api.c index c8ac18f7ac1e..798526d90538 100644 --- a/crypto/api.c +++ b/crypto/api.c @@ -457,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg, int err = -ENOMEM; tfmsize = frontend->tfmsize; - total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend); + total = tfmsize + sizeof(*tfm) + frontend->extsize(alg); mem = kzalloc(total, GFP_KERNEL); if (mem == NULL) @@ -466,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg, tfm = (struct crypto_tfm *)(mem + tfmsize); tfm->__crt_alg = alg; - err = frontend->init_tfm(tfm, frontend); + err = frontend->init_tfm(tfm); if (err) goto out_free_tfm; diff --git a/crypto/pcompress.c b/crypto/pcompress.c index bcadc03726b7..f7c4a7d7412e 100644 --- a/crypto/pcompress.c +++ b/crypto/pcompress.c @@ -36,14 +36,12 @@ static int crypto_pcomp_init(struct crypto_tfm *tfm, u32 type, u32 mask) return 0; } -static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg, - const struct crypto_type *frontend) +static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg) { return alg->cra_ctxsize; } -static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm, - const struct crypto_type *frontend) +static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm) { return 0; } diff --git a/crypto/shash.c b/crypto/shash.c index 3b1b06b3dcf3..7063e1421504 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -448,8 +448,7 @@ static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type, return 0; } -static int crypto_shash_init_tfm(struct crypto_tfm *tfm, - const struct crypto_type *frontend) +static int crypto_shash_init_tfm(struct crypto_tfm *tfm) { struct crypto_shash *hash = __crypto_shash_cast(tfm); @@ -457,8 +456,7 @@ static int crypto_shash_init_tfm(struct crypto_tfm *tfm, return 0; } -static unsigned int crypto_shash_extsize(struct crypto_alg *alg, - const struct crypto_type *frontend) +static unsigned int crypto_shash_extsize(struct crypto_alg *alg) { return alg->cra_ctxsize; } diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index 1d15a926041e..7635fde7b1a2 100644 --- a/include/crypto/algapi.h +++ b/include/crypto/algapi.h 
@@ -22,11 +22,9 @@ struct seq_file; struct crypto_type { unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask); - unsigned int (*extsize)(struct crypto_alg *alg, - const struct crypto_type *frontend); + unsigned int (*extsize)(struct crypto_alg *alg); int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask); - int (*init_tfm)(struct crypto_tfm *tfm, - const struct crypto_type *frontend); + int (*init_tfm)(struct crypto_tfm *tfm); void (*show)(struct seq_file *m, struct crypto_alg *alg); struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask); -- cgit v1.2.3-58-ga151 From 88056ec346ccf41f63dbc7080b24b5fd19d1358d Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 12:28:26 +0800 Subject: crypto: ahash - Convert to new style algorithms This patch converts crypto_ahash to the new style. The old ahash algorithm type is retained until the existing ahash implementations are also converted. All ahash users will automatically get the new crypto_ahash type. Signed-off-by: Herbert Xu --- crypto/ahash.c | 82 +++++++++++++++++++++---------- crypto/shash.c | 8 +-- include/crypto/hash.h | 109 +++++++++++++++++++++++++++++------------ include/crypto/internal/hash.h | 11 +++-- include/linux/crypto.h | 29 ++--------- 5 files changed, 148 insertions(+), 91 deletions(-) (limited to 'include/crypto') diff --git a/crypto/ahash.c b/crypto/ahash.c index f3476374f764..838519386215 100644 --- a/crypto/ahash.c +++ b/crypto/ahash.c @@ -24,6 +24,12 @@ #include "internal.h" +static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash) +{ + return container_of(crypto_hash_alg_common(hash), struct ahash_alg, + halg); +} + static int hash_walk_next(struct crypto_hash_walk *walk) { unsigned int alignmask = walk->alignmask; @@ -169,30 +175,11 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, return -ENOSYS; } -int crypto_ahash_import(struct ahash_request *req, const u8 *in) -{ - struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); - struct ahash_alg *alg = crypto_ahash_alg(tfm); - - memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm)); - - if (alg->reinit) - alg->reinit(req); - - return 0; -} -EXPORT_SYMBOL_GPL(crypto_ahash_import); - -static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type, - u32 mask) -{ - return alg->cra_ctxsize; -} - static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) { - struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash; - struct ahash_tfm *crt = &tfm->crt_ahash; + struct old_ahash_alg *alg = &tfm->__crt_alg->cra_ahash; + struct crypto_ahash *crt = __crypto_ahash_cast(tfm); + struct ahash_alg *nalg = crypto_ahash_alg(crt); if (alg->digestsize > PAGE_SIZE / 8) return -EINVAL; @@ -204,9 +191,42 @@ static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; crt->digestsize = alg->digestsize; + nalg->setkey = alg->setkey; + nalg->halg.digestsize = alg->digestsize; + return 0; } +static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_ahash *hash = __crypto_ahash_cast(tfm); + struct ahash_alg *alg = crypto_ahash_alg(hash); + struct old_ahash_alg *oalg = crypto_old_ahash_alg(hash); + + if (tfm->__crt_alg->cra_type != &crypto_ahash_type) + return crypto_init_shash_ops_async(tfm); + + if (oalg->init) + return crypto_init_ahash_ops(tfm, 0, 0); + + hash->init = alg->init; + hash->update = alg->update; + hash->final = alg->final; + hash->digest = alg->digest; + hash->setkey = alg->setkey ? 
ahash_setkey : ahash_nosetkey; + hash->digestsize = alg->halg.digestsize; + + return 0; +} + +static unsigned int crypto_ahash_extsize(struct crypto_alg *alg) +{ + if (alg->cra_type == &crypto_ahash_type) + return alg->cra_ctxsize; + + return sizeof(struct crypto_shash *); +} + static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) __attribute__ ((unused)); static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) @@ -215,17 +235,29 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? "yes" : "no"); seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); - seq_printf(m, "digestsize : %u\n", alg->cra_ahash.digestsize); + seq_printf(m, "digestsize : %u\n", + __crypto_hash_alg_common(alg)->digestsize); } const struct crypto_type crypto_ahash_type = { - .ctxsize = crypto_ahash_ctxsize, - .init = crypto_init_ahash_ops, + .extsize = crypto_ahash_extsize, + .init_tfm = crypto_ahash_init_tfm, #ifdef CONFIG_PROC_FS .show = crypto_ahash_show, #endif + .maskclear = ~CRYPTO_ALG_TYPE_MASK, + .maskset = CRYPTO_ALG_TYPE_AHASH_MASK, + .type = CRYPTO_ALG_TYPE_AHASH, + .tfmsize = offsetof(struct crypto_ahash, base), }; EXPORT_SYMBOL_GPL(crypto_ahash_type); +struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type, + u32 mask) +{ + return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_alloc_ahash); + MODULE_LICENSE("GPL"); MODULE_DESCRIPTION("Asynchronous cryptographic hash type"); diff --git a/crypto/shash.c b/crypto/shash.c index 7063e1421504..615a5f4d9b7a 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -267,11 +267,11 @@ static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) crypto_free_shash(*ctx); } -static int crypto_init_shash_ops_async(struct crypto_tfm *tfm) +int crypto_init_shash_ops_async(struct crypto_tfm *tfm) { struct crypto_alg *calg = tfm->__crt_alg; struct shash_alg *alg = __crypto_shash_alg(calg); - struct ahash_tfm *crt = &tfm->crt_ahash; + struct crypto_ahash *crt = __crypto_ahash_cast(tfm); struct crypto_shash **ctx = crypto_tfm_ctx(tfm); struct crypto_shash *shash; @@ -428,8 +428,6 @@ static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) switch (mask & CRYPTO_ALG_TYPE_MASK) { case CRYPTO_ALG_TYPE_HASH_MASK: return crypto_init_shash_ops_compat(tfm); - case CRYPTO_ALG_TYPE_AHASH_MASK: - return crypto_init_shash_ops_async(tfm); } return -EINVAL; @@ -441,8 +439,6 @@ static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type, switch (mask & CRYPTO_ALG_TYPE_MASK) { case CRYPTO_ALG_TYPE_HASH_MASK: return sizeof(struct shash_desc *); - case CRYPTO_ALG_TYPE_AHASH_MASK: - return sizeof(struct crypto_shash *); } return 0; diff --git a/include/crypto/hash.h b/include/crypto/hash.h index fcc02d978231..262861d8f0cb 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -15,6 +15,39 @@ #include +struct crypto_ahash; + +struct hash_alg_common { + unsigned int digestsize; + unsigned int statesize; + + struct crypto_alg base; +}; + +struct ahash_request { + struct crypto_async_request base; + + unsigned int nbytes; + struct scatterlist *src; + u8 *result; + + void *__ctx[] CRYPTO_MINALIGN_ATTR; +}; + +struct ahash_alg { + int (*init)(struct ahash_request *req); + int (*update)(struct ahash_request *req); + int (*final)(struct ahash_request *req); + int (*finup)(struct ahash_request *req); + int (*digest)(struct ahash_request *req); + int (*export)(struct 
ahash_request *req, void *out); + int (*import)(struct ahash_request *req, const void *in); + int (*setkey)(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen); + + struct hash_alg_common halg; +}; + struct shash_desc { struct crypto_shash *tfm; u32 flags; @@ -37,6 +70,8 @@ struct shash_alg { unsigned int keylen); unsigned int descsize; + + /* These fields must match hash_alg_common. */ unsigned int digestsize; unsigned int statesize; @@ -44,6 +79,18 @@ struct shash_alg { }; struct crypto_ahash { + int (*init)(struct ahash_request *req); + int (*update)(struct ahash_request *req); + int (*final)(struct ahash_request *req); + int (*finup)(struct ahash_request *req); + int (*digest)(struct ahash_request *req); + int (*export)(struct ahash_request *req, void *out); + int (*import)(struct ahash_request *req, const void *in); + int (*setkey)(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen); + + unsigned int digestsize; + unsigned int reqsize; struct crypto_tfm base; }; @@ -54,19 +101,11 @@ struct crypto_shash { static inline struct crypto_ahash *__crypto_ahash_cast(struct crypto_tfm *tfm) { - return (struct crypto_ahash *)tfm; + return container_of(tfm, struct crypto_ahash, base); } -static inline struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, - u32 type, u32 mask) -{ - type &= ~CRYPTO_ALG_TYPE_MASK; - mask &= ~CRYPTO_ALG_TYPE_MASK; - type |= CRYPTO_ALG_TYPE_AHASH; - mask |= CRYPTO_ALG_TYPE_AHASH_MASK; - - return __crypto_ahash_cast(crypto_alloc_base(alg_name, type, mask)); -} +struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type, + u32 mask); static inline struct crypto_tfm *crypto_ahash_tfm(struct crypto_ahash *tfm) { @@ -75,7 +114,7 @@ static inline struct crypto_tfm *crypto_ahash_tfm(struct crypto_ahash *tfm) static inline void crypto_free_ahash(struct crypto_ahash *tfm) { - crypto_free_tfm(crypto_ahash_tfm(tfm)); + crypto_destroy_tfm(tfm, crypto_ahash_tfm(tfm)); } static inline unsigned int crypto_ahash_alignmask( @@ -84,14 +123,26 @@ static inline unsigned int crypto_ahash_alignmask( return crypto_tfm_alg_alignmask(crypto_ahash_tfm(tfm)); } -static inline struct ahash_tfm *crypto_ahash_crt(struct crypto_ahash *tfm) +static inline struct hash_alg_common *__crypto_hash_alg_common( + struct crypto_alg *alg) { - return &crypto_ahash_tfm(tfm)->crt_ahash; + return container_of(alg, struct hash_alg_common, base); +} + +static inline struct hash_alg_common *crypto_hash_alg_common( + struct crypto_ahash *tfm) +{ + return __crypto_hash_alg_common(crypto_ahash_tfm(tfm)->__crt_alg); } static inline unsigned int crypto_ahash_digestsize(struct crypto_ahash *tfm) { - return crypto_ahash_crt(tfm)->digestsize; + return tfm->digestsize; +} + +static inline unsigned int crypto_ahash_statesize(struct crypto_ahash *tfm) +{ + return crypto_hash_alg_common(tfm)->statesize; } static inline u32 crypto_ahash_get_flags(struct crypto_ahash *tfm) @@ -117,7 +168,7 @@ static inline struct crypto_ahash *crypto_ahash_reqtfm( static inline unsigned int crypto_ahash_reqsize(struct crypto_ahash *tfm) { - return crypto_ahash_crt(tfm)->reqsize; + return tfm->reqsize; } static inline void *ahash_request_ctx(struct ahash_request *req) @@ -128,41 +179,37 @@ static inline void *ahash_request_ctx(struct ahash_request *req) static inline int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen) { - struct ahash_tfm *crt = crypto_ahash_crt(tfm); - - return crt->setkey(tfm, key, keylen); + return tfm->setkey(tfm, key, keylen); } static inline int 
crypto_ahash_digest(struct ahash_request *req) { - struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req)); - return crt->digest(req); + return crypto_ahash_reqtfm(req)->digest(req); } -static inline void crypto_ahash_export(struct ahash_request *req, u8 *out) +static inline int crypto_ahash_export(struct ahash_request *req, void *out) { - memcpy(out, ahash_request_ctx(req), - crypto_ahash_reqsize(crypto_ahash_reqtfm(req))); + return crypto_ahash_reqtfm(req)->export(req, out); } -int crypto_ahash_import(struct ahash_request *req, const u8 *in); +static inline int crypto_ahash_import(struct ahash_request *req, const void *in) +{ + return crypto_ahash_reqtfm(req)->import(req, in); +} static inline int crypto_ahash_init(struct ahash_request *req) { - struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req)); - return crt->init(req); + return crypto_ahash_reqtfm(req)->init(req); } static inline int crypto_ahash_update(struct ahash_request *req) { - struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req)); - return crt->update(req); + return crypto_ahash_reqtfm(req)->update(req); } static inline int crypto_ahash_final(struct ahash_request *req) { - struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req)); - return crt->final(req); + return crypto_ahash_reqtfm(req)->final(req); } static inline void ahash_request_set_tfm(struct ahash_request *req, diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 5e45818f3351..08bdffafefad 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -51,6 +51,9 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, struct crypto_hash_walk *walk, struct scatterlist *sg, unsigned int len); +int crypto_register_ahash(struct ahash_alg *alg); +int crypto_unregister_ahash(struct ahash_alg *alg); + int crypto_register_shash(struct shash_alg *alg); int crypto_unregister_shash(struct shash_alg *alg); int shash_register_instance(struct crypto_template *tmpl, @@ -66,12 +69,14 @@ struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc); int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc); +int crypto_init_shash_ops_async(struct crypto_tfm *tfm); + static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) { - return crypto_tfm_ctx(&tfm->base); + return crypto_tfm_ctx(crypto_ahash_tfm(tfm)); } -static inline struct ahash_alg *crypto_ahash_alg( +static inline struct old_ahash_alg *crypto_old_ahash_alg( struct crypto_ahash *tfm) { return &crypto_ahash_tfm(tfm)->__crt_alg->cra_ahash; @@ -80,7 +85,7 @@ static inline struct ahash_alg *crypto_ahash_alg( static inline void crypto_ahash_set_reqsize(struct crypto_ahash *tfm, unsigned int reqsize) { - crypto_ahash_crt(tfm)->reqsize = reqsize; + tfm->reqsize = reqsize; } static inline int ahash_enqueue_request(struct crypto_queue *queue, diff --git a/include/linux/crypto.h b/include/linux/crypto.h index 274f9c7da90c..9e7e9b62a3dc 100644 --- a/include/linux/crypto.h +++ b/include/linux/crypto.h @@ -120,6 +120,7 @@ struct crypto_rng; struct crypto_tfm; struct crypto_type; struct aead_givcrypt_request; +struct ahash_request; struct skcipher_givcrypt_request; typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err); @@ -146,16 +147,6 @@ struct ablkcipher_request { void *__ctx[] CRYPTO_MINALIGN_ATTR; }; -struct ahash_request { - struct crypto_async_request base; - - unsigned int nbytes; - struct 
scatterlist *src; - u8 *result; - - void *__ctx[] CRYPTO_MINALIGN_ATTR; -}; - /** * struct aead_request - AEAD request * @base: Common attributes for async crypto requests @@ -220,7 +211,7 @@ struct ablkcipher_alg { unsigned int ivsize; }; -struct ahash_alg { +struct old_ahash_alg { int (*init)(struct ahash_request *req); int (*reinit)(struct ahash_request *req); int (*update)(struct ahash_request *req); @@ -346,7 +337,7 @@ struct crypto_alg { struct cipher_alg cipher; struct digest_alg digest; struct hash_alg hash; - struct ahash_alg ahash; + struct old_ahash_alg ahash; struct compress_alg compress; struct rng_alg rng; } cra_u; @@ -433,18 +424,6 @@ struct hash_tfm { unsigned int digestsize; }; -struct ahash_tfm { - int (*init)(struct ahash_request *req); - int (*update)(struct ahash_request *req); - int (*final)(struct ahash_request *req); - int (*digest)(struct ahash_request *req); - int (*setkey)(struct crypto_ahash *tfm, const u8 *key, - unsigned int keylen); - - unsigned int digestsize; - unsigned int reqsize; -}; - struct compress_tfm { int (*cot_compress)(struct crypto_tfm *tfm, const u8 *src, unsigned int slen, @@ -465,7 +444,6 @@ struct rng_tfm { #define crt_blkcipher crt_u.blkcipher #define crt_cipher crt_u.cipher #define crt_hash crt_u.hash -#define crt_ahash crt_u.ahash #define crt_compress crt_u.compress #define crt_rng crt_u.rng @@ -479,7 +457,6 @@ struct crypto_tfm { struct blkcipher_tfm blkcipher; struct cipher_tfm cipher; struct hash_tfm hash; - struct ahash_tfm ahash; struct compress_tfm compress; struct rng_tfm rng; } crt_u; -- cgit v1.2.3-58-ga151 From 01c2dece4316dadc0f9fad1ad0b56d493980e492 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 14:06:06 +0800 Subject: crypto: ahash - Add instance/spawn support This patch adds support for creating ahash instances and using ahash as spawns. 
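As a rough illustration of how the helpers introduced by this patch fit together, the sketch below shows a template ->create callback built on them. The "example" names are placeholders, the type/mask arguments passed to ahash_attr_alg() are an assumption, and the algorithm callbacks are elided; this mirrors the cryptd-style flow rather than any specific in-tree user.

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct ahash_instance *inst;
	struct crypto_ahash_spawn *spawn;
	struct hash_alg_common *halg;
	int err;

	/* Find the underlying ahash named by the template parameter. */
	halg = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_AHASH,
			      CRYPTO_ALG_TYPE_AHASH_MASK);
	if (IS_ERR(halg))
		return PTR_ERR(halg);

	/* The instance context has room for one spawn. */
	inst = ahash_alloc_instance("example", &halg->base);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	spawn = ahash_instance_ctx(inst);
	err = crypto_init_ahash_spawn(spawn, halg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	/* ... fill in inst->alg (init/update/final/digest callbacks,
	 *     halg.digestsize and the remaining cra_* fields) ... */

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&spawn->base);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(&halg->base);
	return err;
}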
Signed-off-by: Herbert Xu --- crypto/ahash.c | 72 ++++++++++++++++++++++++++++++++++++++++++ include/crypto/internal/hash.h | 51 ++++++++++++++++++++++++++++++ 2 files changed, 123 insertions(+) (limited to 'include/crypto') diff --git a/crypto/ahash.c b/crypto/ahash.c index 838519386215..7f599d26086a 100644 --- a/crypto/ahash.c +++ b/crypto/ahash.c @@ -259,5 +259,77 @@ struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type, } EXPORT_SYMBOL_GPL(crypto_alloc_ahash); +static int ahash_prepare_alg(struct ahash_alg *alg) +{ + struct crypto_alg *base = &alg->halg.base; + + if (alg->halg.digestsize > PAGE_SIZE / 8 || + alg->halg.statesize > PAGE_SIZE / 8) + return -EINVAL; + + base->cra_type = &crypto_ahash_type; + base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; + base->cra_flags |= CRYPTO_ALG_TYPE_AHASH; + + return 0; +} + +int crypto_register_ahash(struct ahash_alg *alg) +{ + struct crypto_alg *base = &alg->halg.base; + int err; + + err = ahash_prepare_alg(alg); + if (err) + return err; + + return crypto_register_alg(base); +} +EXPORT_SYMBOL_GPL(crypto_register_ahash); + +int crypto_unregister_ahash(struct ahash_alg *alg) +{ + return crypto_unregister_alg(&alg->halg.base); +} +EXPORT_SYMBOL_GPL(crypto_unregister_ahash); + +int ahash_register_instance(struct crypto_template *tmpl, + struct ahash_instance *inst) +{ + int err; + + err = ahash_prepare_alg(&inst->alg); + if (err) + return err; + + return crypto_register_instance(tmpl, ahash_crypto_instance(inst)); +} +EXPORT_SYMBOL_GPL(ahash_register_instance); + +void ahash_free_instance(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(ahash_instance(inst)); +} +EXPORT_SYMBOL_GPL(ahash_free_instance); + +int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn, + struct hash_alg_common *alg, + struct crypto_instance *inst) +{ + return crypto_init_spawn2(&spawn->base, &alg->base, inst, + &crypto_ahash_type); +} +EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn); + +struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask); + return IS_ERR(alg) ? 
ERR_CAST(alg) : __crypto_hash_alg_common(alg); +} +EXPORT_SYMBOL_GPL(ahash_attr_alg); + MODULE_LICENSE("GPL"); MODULE_DESCRIPTION("Asynchronous cryptographic hash type"); diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 08bdffafefad..34eda233ee67 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -34,10 +34,18 @@ struct crypto_hash_walk { unsigned int flags; }; +struct ahash_instance { + struct ahash_alg alg; +}; + struct shash_instance { struct shash_alg alg; }; +struct crypto_ahash_spawn { + struct crypto_spawn base; +}; + struct crypto_shash_spawn { struct crypto_spawn base; }; @@ -53,6 +61,15 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, int crypto_register_ahash(struct ahash_alg *alg); int crypto_unregister_ahash(struct ahash_alg *alg); +int ahash_register_instance(struct crypto_template *tmpl, + struct ahash_instance *inst); +void ahash_free_instance(struct crypto_instance *inst); + +int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn, + struct hash_alg_common *alg, + struct crypto_instance *inst); + +struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask); int crypto_register_shash(struct shash_alg *alg); int crypto_unregister_shash(struct shash_alg *alg); @@ -88,6 +105,40 @@ static inline void crypto_ahash_set_reqsize(struct crypto_ahash *tfm, tfm->reqsize = reqsize; } +static inline struct crypto_instance *ahash_crypto_instance( + struct ahash_instance *inst) +{ + return container_of(&inst->alg.halg.base, struct crypto_instance, alg); +} + +static inline struct ahash_instance *ahash_instance( + struct crypto_instance *inst) +{ + return container_of(&inst->alg, struct ahash_instance, alg.halg.base); +} + +static inline void *ahash_instance_ctx(struct ahash_instance *inst) +{ + return crypto_instance_ctx(ahash_crypto_instance(inst)); +} + +static inline unsigned int ahash_instance_headroom(void) +{ + return sizeof(struct ahash_alg) - sizeof(struct crypto_alg); +} + +static inline struct ahash_instance *ahash_alloc_instance( + const char *name, struct crypto_alg *alg) +{ + return crypto_alloc_instance2(name, alg, ahash_instance_headroom()); +} + +static inline struct crypto_ahash *crypto_spawn_ahash( + struct crypto_ahash_spawn *spawn) +{ + return crypto_spawn_tfm2(&spawn->base); +} + static inline int ahash_enqueue_request(struct crypto_queue *queue, struct ahash_request *request) { -- cgit v1.2.3-58-ga151 From 52861c7cd711fac97b37ae0f4842a9ad26ecae72 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 18:30:24 +0800 Subject: crypto: hash - Add helpers to free spawns This patch adds the helpers crypto_drop_ahash and crypto_drop_shash so that these spawns can be dropped without ugly casts. 
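For illustration, a minimal sketch of the intended use: an instance free callback whose context holds a typed hash spawn can now drop it directly. The context layout and the example_* names below are made up; the pattern matches how cryptd uses these helpers later in this series.

struct example_instance_ctx {
	struct crypto_shash_spawn spawn;
};

static void example_free(struct crypto_instance *inst)
{
	struct example_instance_ctx *ctx = crypto_instance_ctx(inst);

	/* No cast to struct crypto_spawn * is needed any more. */
	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}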
Signed-off-by: Herbert Xu --- include/crypto/internal/hash.h | 10 ++++++++++ 1 file changed, 10 insertions(+) (limited to 'include/crypto') diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 34eda233ee67..9d30316e9f10 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -69,6 +69,11 @@ int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn, struct hash_alg_common *alg, struct crypto_instance *inst); +static inline void crypto_drop_ahash(struct crypto_ahash_spawn *spawn) +{ + crypto_drop_spawn(&spawn->base); +} + struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask); int crypto_register_shash(struct shash_alg *alg); @@ -81,6 +86,11 @@ int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, struct shash_alg *alg, struct crypto_instance *inst); +static inline void crypto_drop_shash(struct crypto_shash_spawn *spawn) +{ + crypto_drop_spawn(&spawn->base); +} + struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc); -- cgit v1.2.3-58-ga151 From 9cd899a32f611eb6328014f1d9e0ba31977812d9 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 18:45:45 +0800 Subject: crypto: cryptd - Switch to template create API This patch changes cryptd to use the template->create function instead of alloc in anticipation for the switch to new style ahash algorithms. Signed-off-by: Herbert Xu --- crypto/cryptd.c | 53 +++++++++++++++++++++++++++---------------------- crypto/internal.h | 3 --- include/crypto/algapi.h | 3 +++ 3 files changed, 32 insertions(+), 27 deletions(-) (limited to 'include/crypto') diff --git a/crypto/cryptd.c b/crypto/cryptd.c index 6e6722edd0d9..ad58f513ba8b 100644 --- a/crypto/cryptd.c +++ b/crypto/cryptd.c @@ -287,8 +287,9 @@ out_free_inst: goto out; } -static struct crypto_instance *cryptd_alloc_blkcipher( - struct rtattr **tb, struct cryptd_queue *queue) +static int cryptd_create_blkcipher(struct crypto_template *tmpl, + struct rtattr **tb, + struct cryptd_queue *queue) { struct cryptd_instance_ctx *ctx; struct crypto_instance *inst; @@ -298,7 +299,7 @@ static struct crypto_instance *cryptd_alloc_blkcipher( alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER, CRYPTO_ALG_TYPE_MASK); if (IS_ERR(alg)) - return ERR_CAST(alg); + return PTR_ERR(alg); inst = cryptd_alloc_instance(alg, sizeof(*ctx)); if (IS_ERR(inst)) @@ -330,14 +331,16 @@ static struct crypto_instance *cryptd_alloc_blkcipher( inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue; inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue; + err = crypto_register_instance(tmpl, inst); + if (err) { + crypto_drop_spawn(&ctx->spawn); +out_free_inst: + kfree(inst); + } + out_put_alg: crypto_mod_put(alg); - return inst; - -out_free_inst: - kfree(inst); - inst = ERR_PTR(err); - goto out_put_alg; + return err; } static int cryptd_hash_init_tfm(struct crypto_tfm *tfm) @@ -502,8 +505,8 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req) return cryptd_hash_enqueue(req, cryptd_hash_digest); } -static struct crypto_instance *cryptd_alloc_hash( - struct rtattr **tb, struct cryptd_queue *queue) +static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb, + struct cryptd_queue *queue) { struct hashd_instance_ctx *ctx; struct crypto_instance *inst; @@ -513,7 +516,7 @@ static struct crypto_instance *cryptd_alloc_hash( salg = shash_attr_alg(tb[1], 0, 0); if (IS_ERR(salg)) - return 
ERR_CAST(salg); + return PTR_ERR(salg); alg = &salg->base; inst = cryptd_alloc_instance(alg, sizeof(*ctx)); @@ -542,34 +545,36 @@ static struct crypto_instance *cryptd_alloc_hash( inst->alg.cra_ahash.setkey = cryptd_hash_setkey; inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue; + err = crypto_register_instance(tmpl, inst); + if (err) { + crypto_drop_shash(&ctx->spawn); +out_free_inst: + kfree(inst); + } + out_put_alg: crypto_mod_put(alg); - return inst; - -out_free_inst: - kfree(inst); - inst = ERR_PTR(err); - goto out_put_alg; + return err; } static struct cryptd_queue queue; -static struct crypto_instance *cryptd_alloc(struct rtattr **tb) +static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb) { struct crypto_attr_type *algt; algt = crypto_get_attr_type(tb); if (IS_ERR(algt)) - return ERR_CAST(algt); + return PTR_ERR(algt); switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { case CRYPTO_ALG_TYPE_BLKCIPHER: - return cryptd_alloc_blkcipher(tb, &queue); + return cryptd_create_blkcipher(tmpl, tb, &queue); case CRYPTO_ALG_TYPE_DIGEST: - return cryptd_alloc_hash(tb, &queue); + return cryptd_create_hash(tmpl, tb, &queue); } - return ERR_PTR(-EINVAL); + return -EINVAL; } static void cryptd_free(struct crypto_instance *inst) @@ -582,7 +587,7 @@ static void cryptd_free(struct crypto_instance *inst) static struct crypto_template cryptd_tmpl = { .name = "cryptd", - .alloc = cryptd_alloc, + .create = cryptd_create, .free = cryptd_free, .module = THIS_MODULE, }; diff --git a/crypto/internal.h b/crypto/internal.h index 030e22921c30..2d226362e594 100644 --- a/crypto/internal.h +++ b/crypto/internal.h @@ -97,9 +97,6 @@ struct crypto_alg *crypto_find_alg(const char *alg_name, void *crypto_alloc_tfm(const char *alg_name, const struct crypto_type *frontend, u32 type, u32 mask); -int crypto_register_instance(struct crypto_template *tmpl, - struct crypto_instance *inst); - int crypto_register_notifier(struct notifier_block *nb); int crypto_unregister_notifier(struct notifier_block *nb); int crypto_probing_notify(unsigned long val, void *v); diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index 7635fde7b1a2..9de6c38f4069 100644 --- a/include/crypto/algapi.h +++ b/include/crypto/algapi.h @@ -114,6 +114,9 @@ int crypto_register_template(struct crypto_template *tmpl); void crypto_unregister_template(struct crypto_template *tmpl); struct crypto_template *crypto_lookup_template(const char *name); +int crypto_register_instance(struct crypto_template *tmpl, + struct crypto_instance *inst); + int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, struct crypto_instance *inst, u32 mask); int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg, -- cgit v1.2.3-58-ga151 From 4dc10c0142ce0af8c20ec44dc6928ae63ad4f73a Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 20:21:46 +0800 Subject: crypto: crypto4xx - Switch to new style ahash This patch changes crypto4xx to use the new style ahash type. In particular, we now use ahash_alg to define ahash algorithms instead of crypto_alg. This is achieved by introducing a union that encapsulates the new type and the existing crypto_alg structure. They're told apart through a u32 field containing the type value. 
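A stripped-down sketch of that tagged-union idea, with illustrative names (the real crypto4xx definitions follow in the diff below): a u32 tag selects which union member is live, and registration dispatches on it.

struct example_alg_common {
	u32 type;				/* CRYPTO_ALG_TYPE_* tag */
	union {
		struct crypto_alg cipher;	/* old-style entries */
		struct ahash_alg hash;		/* new-style ahash entries */
	} u;
};

static int example_register_one(struct example_alg_common *alg)
{
	switch (alg->type) {
	case CRYPTO_ALG_TYPE_AHASH:
		return crypto_register_ahash(&alg->u.hash);
	default:
		return crypto_register_alg(&alg->u.cipher);
	}
}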
Signed-off-by: Herbert Xu --- drivers/crypto/amcc/crypto4xx_core.c | 85 +++++++++++++++++++++--------------- drivers/crypto/amcc/crypto4xx_core.h | 25 +++++++++-- include/crypto/internal/hash.h | 6 +++ 3 files changed, 77 insertions(+), 39 deletions(-) (limited to 'include/crypto') diff --git a/drivers/crypto/amcc/crypto4xx_core.c b/drivers/crypto/amcc/crypto4xx_core.c index cb7be4283aa0..857e35efedac 100644 --- a/drivers/crypto/amcc/crypto4xx_core.c +++ b/drivers/crypto/amcc/crypto4xx_core.c @@ -31,8 +31,6 @@ #include #include #include -#include -#include #include #include #include "crypto4xx_reg_def.h" @@ -998,11 +996,15 @@ static int crypto4xx_alg_init(struct crypto_tfm *tfm) ctx->sa_out_dma_addr = 0; ctx->sa_len = 0; - if (alg->cra_type == &crypto_ablkcipher_type) + switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { + default: tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx); - else if (alg->cra_type == &crypto_ahash_type) + break; + case CRYPTO_ALG_TYPE_AHASH: crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), sizeof(struct crypto4xx_ctx)); + break; + } return 0; } @@ -1016,7 +1018,8 @@ static void crypto4xx_alg_exit(struct crypto_tfm *tfm) } int crypto4xx_register_alg(struct crypto4xx_device *sec_dev, - struct crypto_alg *crypto_alg, int array_size) + struct crypto4xx_alg_common *crypto_alg, + int array_size) { struct crypto4xx_alg *alg; int i; @@ -1028,13 +1031,18 @@ int crypto4xx_register_alg(struct crypto4xx_device *sec_dev, return -ENOMEM; alg->alg = crypto_alg[i]; - INIT_LIST_HEAD(&alg->alg.cra_list); - if (alg->alg.cra_init == NULL) - alg->alg.cra_init = crypto4xx_alg_init; - if (alg->alg.cra_exit == NULL) - alg->alg.cra_exit = crypto4xx_alg_exit; alg->dev = sec_dev; - rc = crypto_register_alg(&alg->alg); + + switch (alg->alg.type) { + case CRYPTO_ALG_TYPE_AHASH: + rc = crypto_register_ahash(&alg->alg.u.hash); + break; + + default: + rc = crypto_register_alg(&alg->alg.u.cipher); + break; + } + if (rc) { list_del(&alg->entry); kfree(alg); @@ -1052,7 +1060,14 @@ static void crypto4xx_unregister_alg(struct crypto4xx_device *sec_dev) list_for_each_entry_safe(alg, tmp, &sec_dev->alg_list, entry) { list_del(&alg->entry); - crypto_unregister_alg(&alg->alg); + switch (alg->alg.type) { + case CRYPTO_ALG_TYPE_AHASH: + crypto_unregister_ahash(&alg->alg.u.hash); + break; + + default: + crypto_unregister_alg(&alg->alg.u.cipher); + } kfree(alg); } } @@ -1105,17 +1120,18 @@ static irqreturn_t crypto4xx_ce_interrupt_handler(int irq, void *data) /** * Supported Crypto Algorithms */ -struct crypto_alg crypto4xx_alg[] = { +struct crypto4xx_alg_common crypto4xx_alg[] = { /* Crypto AES modes */ - { + { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = { .cra_name = "cbc(aes)", .cra_driver_name = "cbc-aes-ppc4xx", .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_blocksize = AES_BLOCK_SIZE, .cra_ctxsize = sizeof(struct crypto4xx_ctx), - .cra_alignmask = 0, .cra_type = &crypto_ablkcipher_type, + .cra_init = crypto4xx_alg_init, + .cra_exit = crypto4xx_alg_exit, .cra_module = THIS_MODULE, .cra_u = { .ablkcipher = { @@ -1127,29 +1143,26 @@ struct crypto_alg crypto4xx_alg[] = { .decrypt = crypto4xx_decrypt, } } - }, + }}, /* Hash SHA1 */ - { - .cra_name = "sha1", - .cra_driver_name = "sha1-ppc4xx", - .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, - .cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC, - .cra_blocksize = SHA1_BLOCK_SIZE, - .cra_ctxsize = sizeof(struct crypto4xx_ctx), - .cra_alignmask = 0, - .cra_type = &crypto_ahash_type, - 
.cra_init = crypto4xx_sha1_alg_init, - .cra_module = THIS_MODULE, - .cra_u = { - .ahash = { - .digestsize = SHA1_DIGEST_SIZE, - .init = crypto4xx_hash_init, - .update = crypto4xx_hash_update, - .final = crypto4xx_hash_final, - .digest = crypto4xx_hash_digest, - } + { .type = CRYPTO_ALG_TYPE_AHASH, .u.hash = { + .init = crypto4xx_hash_init, + .update = crypto4xx_hash_update, + .final = crypto4xx_hash_final, + .digest = crypto4xx_hash_digest, + .halg.digestsize = SHA1_DIGEST_SIZE, + .halg.base = { + .cra_name = "sha1", + .cra_driver_name = "sha1-ppc4xx", + .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, + .cra_flags = CRYPTO_ALG_ASYNC, + .cra_blocksize = SHA1_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct crypto4xx_ctx), + .cra_init = crypto4xx_sha1_alg_init, + .cra_exit = crypto4xx_alg_exit, + .cra_module = THIS_MODULE, } - }, + }}, }; /** diff --git a/drivers/crypto/amcc/crypto4xx_core.h b/drivers/crypto/amcc/crypto4xx_core.h index 1ef103449364..da9cbe3b9fc3 100644 --- a/drivers/crypto/amcc/crypto4xx_core.h +++ b/drivers/crypto/amcc/crypto4xx_core.h @@ -22,6 +22,8 @@ #ifndef __CRYPTO4XX_CORE_H__ #define __CRYPTO4XX_CORE_H__ +#include + #define PPC460SX_SDR0_SRST 0x201 #define PPC405EX_SDR0_SRST 0x200 #define PPC460EX_SDR0_SRST 0x201 @@ -138,14 +140,31 @@ struct crypto4xx_req_ctx { u16 sa_len; }; +struct crypto4xx_alg_common { + u32 type; + union { + struct crypto_alg cipher; + struct ahash_alg hash; + } u; +}; + struct crypto4xx_alg { struct list_head entry; - struct crypto_alg alg; + struct crypto4xx_alg_common alg; struct crypto4xx_device *dev; }; -#define crypto_alg_to_crypto4xx_alg(x) \ - container_of(x, struct crypto4xx_alg, alg) +static inline struct crypto4xx_alg *crypto_alg_to_crypto4xx_alg( + struct crypto_alg *x) +{ + switch (x->cra_flags & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_AHASH: + return container_of(__crypto_ahash_alg(x), + struct crypto4xx_alg, alg.u.hash); + } + + return container_of(x, struct crypto4xx_alg, alg.u.cipher); +} extern int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size); extern void crypto4xx_free_sa(struct crypto4xx_ctx *ctx); diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 9d30316e9f10..e3a82514d61e 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -103,6 +103,12 @@ static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) return crypto_tfm_ctx(crypto_ahash_tfm(tfm)); } +static inline struct ahash_alg *__crypto_ahash_alg(struct crypto_alg *alg) +{ + return container_of(__crypto_hash_alg_common(alg), struct ahash_alg, + halg); +} + static inline struct old_ahash_alg *crypto_old_ahash_alg( struct crypto_ahash *tfm) { -- cgit v1.2.3-58-ga151 From 500b3e3c3dc8e4845b77ae81e5b7b085ab183ce6 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 14 Jul 2009 20:29:57 +0800 Subject: crypto: ahash - Remove old_ahash_alg Now that all ahash implementations have been converted to the new ahash type, we can remove old_ahash_alg and its associated support. 
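For context, a minimal sketch of what a definition looks like once the old union member is gone: an async hash is described entirely by struct ahash_alg, with the common fields under halg, and is registered with crypto_register_ahash(). All example_* names, the sizes and the ctx type are placeholders; the type bits in cra_flags and cra_type are filled in by the registration code.

int example_init(struct ahash_request *req);	/* implemented elsewhere */
int example_update(struct ahash_request *req);
int example_final(struct ahash_request *req);
int example_digest(struct ahash_request *req);

struct example_ctx {
	u32 state;
};

static struct ahash_alg example_alg = {
	.init		= example_init,
	.update		= example_update,
	.final		= example_final,
	.digest		= example_digest,
	.halg.digestsize = 20,			/* e.g. a SHA-1 sized digest */
	.halg.base	= {
		.cra_name	= "example",
		.cra_driver_name = "example-generic",
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= 64,
		.cra_ctxsize	= sizeof(struct example_ctx),
		.cra_module	= THIS_MODULE,
	},
};

/* Registered with crypto_register_ahash(&example_alg) and removed again
 * with crypto_unregister_ahash(&example_alg). */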
Signed-off-by: Herbert Xu --- crypto/ahash.c | 27 --------------------------- crypto/shash.c | 2 -- include/crypto/hash.h | 3 +-- include/crypto/internal/hash.h | 6 ------ include/linux/crypto.h | 16 ---------------- 5 files changed, 1 insertion(+), 53 deletions(-) (limited to 'include/crypto') diff --git a/crypto/ahash.c b/crypto/ahash.c index 7f599d26086a..cc824ef25830 100644 --- a/crypto/ahash.c +++ b/crypto/ahash.c @@ -175,46 +175,19 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, return -ENOSYS; } -static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) -{ - struct old_ahash_alg *alg = &tfm->__crt_alg->cra_ahash; - struct crypto_ahash *crt = __crypto_ahash_cast(tfm); - struct ahash_alg *nalg = crypto_ahash_alg(crt); - - if (alg->digestsize > PAGE_SIZE / 8) - return -EINVAL; - - crt->init = alg->init; - crt->update = alg->update; - crt->final = alg->final; - crt->digest = alg->digest; - crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; - crt->digestsize = alg->digestsize; - - nalg->setkey = alg->setkey; - nalg->halg.digestsize = alg->digestsize; - - return 0; -} - static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) { struct crypto_ahash *hash = __crypto_ahash_cast(tfm); struct ahash_alg *alg = crypto_ahash_alg(hash); - struct old_ahash_alg *oalg = crypto_old_ahash_alg(hash); if (tfm->__crt_alg->cra_type != &crypto_ahash_type) return crypto_init_shash_ops_async(tfm); - if (oalg->init) - return crypto_init_ahash_ops(tfm, 0, 0); - hash->init = alg->init; hash->update = alg->update; hash->final = alg->final; hash->digest = alg->digest; hash->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; - hash->digestsize = alg->halg.digestsize; return 0; } diff --git a/crypto/shash.c b/crypto/shash.c index 615a5f4d9b7a..fd92c03b38fc 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -270,7 +270,6 @@ static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) int crypto_init_shash_ops_async(struct crypto_tfm *tfm) { struct crypto_alg *calg = tfm->__crt_alg; - struct shash_alg *alg = __crypto_shash_alg(calg); struct crypto_ahash *crt = __crypto_ahash_cast(tfm); struct crypto_shash **ctx = crypto_tfm_ctx(tfm); struct crypto_shash *shash; @@ -293,7 +292,6 @@ int crypto_init_shash_ops_async(struct crypto_tfm *tfm) crt->digest = shash_async_digest; crt->setkey = shash_async_setkey; - crt->digestsize = alg->digestsize; crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); return 0; diff --git a/include/crypto/hash.h b/include/crypto/hash.h index 262861d8f0cb..45c2bddfdf32 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -89,7 +89,6 @@ struct crypto_ahash { int (*setkey)(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen); - unsigned int digestsize; unsigned int reqsize; struct crypto_tfm base; }; @@ -137,7 +136,7 @@ static inline struct hash_alg_common *crypto_hash_alg_common( static inline unsigned int crypto_ahash_digestsize(struct crypto_ahash *tfm) { - return tfm->digestsize; + return crypto_hash_alg_common(tfm)->digestsize; } static inline unsigned int crypto_ahash_statesize(struct crypto_ahash *tfm) diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index e3a82514d61e..179dd8fdfa14 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -109,12 +109,6 @@ static inline struct ahash_alg *__crypto_ahash_alg(struct crypto_alg *alg) halg); } -static inline struct old_ahash_alg *crypto_old_ahash_alg( - struct crypto_ahash *tfm) -{ - return 
&crypto_ahash_tfm(tfm)->__crt_alg->cra_ahash; -} - static inline void crypto_ahash_set_reqsize(struct crypto_ahash *tfm, unsigned int reqsize) { diff --git a/include/linux/crypto.h b/include/linux/crypto.h index 9e7e9b62a3dc..fd929889e8dc 100644 --- a/include/linux/crypto.h +++ b/include/linux/crypto.h @@ -115,12 +115,10 @@ struct crypto_async_request; struct crypto_aead; struct crypto_blkcipher; struct crypto_hash; -struct crypto_ahash; struct crypto_rng; struct crypto_tfm; struct crypto_type; struct aead_givcrypt_request; -struct ahash_request; struct skcipher_givcrypt_request; typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err); @@ -211,18 +209,6 @@ struct ablkcipher_alg { unsigned int ivsize; }; -struct old_ahash_alg { - int (*init)(struct ahash_request *req); - int (*reinit)(struct ahash_request *req); - int (*update)(struct ahash_request *req); - int (*final)(struct ahash_request *req); - int (*digest)(struct ahash_request *req); - int (*setkey)(struct crypto_ahash *tfm, const u8 *key, - unsigned int keylen); - - unsigned int digestsize; -}; - struct aead_alg { int (*setkey)(struct crypto_aead *tfm, const u8 *key, unsigned int keylen); @@ -309,7 +295,6 @@ struct rng_alg { #define cra_cipher cra_u.cipher #define cra_digest cra_u.digest #define cra_hash cra_u.hash -#define cra_ahash cra_u.ahash #define cra_compress cra_u.compress #define cra_rng cra_u.rng @@ -337,7 +322,6 @@ struct crypto_alg { struct cipher_alg cipher; struct digest_alg digest; struct hash_alg hash; - struct old_ahash_alg ahash; struct compress_alg compress; struct rng_alg rng; } cra_u; -- cgit v1.2.3-58-ga151 From 66f6ce5e52f2f209d5bf1f06167cec888f4f4c13 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 15 Jul 2009 12:40:40 +0800 Subject: crypto: ahash - Add unaligned handling and default operations This patch exports the finup operation where available and adds a default finup operation for ahash. The operations final, finup and digest will now also deal with unaligned result pointers by copying the result. Finally, the export and import operations are now exported as well.
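A rough caller-side sketch of what this buys: the result buffer handed to the ahash API no longer has to honour the implementation's alignmask, since the core bounces it through an aligned buffer when necessary. The helper name and the choice of "sha1" are only examples, and a real caller must also handle -EINPROGRESS/-EBUSY from asynchronous implementations instead of passing a NULL completion callback.

static int example_hash_sg(struct scatterlist *sg, unsigned int nbytes,
			   u8 *result)	/* may be unaligned */
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	int err;

	tfm = crypto_alloc_ahash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* No completion callback: assumes a synchronous implementation. */
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	ahash_request_set_crypt(req, sg, result, nbytes);

	err = crypto_ahash_digest(req);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}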
Signed-off-by: Herbert Xu --- crypto/ahash.c | 204 ++++++++++++++++++++++++++++++++++++++++- crypto/shash.c | 52 ++++++++++- include/crypto/hash.h | 23 ++--- include/crypto/internal/hash.h | 6 ++ 4 files changed, 263 insertions(+), 22 deletions(-) (limited to 'include/crypto') diff --git a/crypto/ahash.c b/crypto/ahash.c index a196055b73d3..ac0798d2824e 100644 --- a/crypto/ahash.c +++ b/crypto/ahash.c @@ -24,6 +24,13 @@ #include "internal.h" +struct ahash_request_priv { + crypto_completion_t complete; + void *data; + u8 *result; + void *ubuf[] CRYPTO_MINALIGN_ATTR; +}; + static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash) { return container_of(crypto_hash_alg_common(hash), struct ahash_alg, @@ -156,7 +163,7 @@ static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key, return ret; } -static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, +int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen) { struct ahash_alg *ahash = crypto_ahash_alg(tfm); @@ -167,6 +174,7 @@ static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, return ahash->setkey(tfm, key, keylen); } +EXPORT_SYMBOL_GPL(crypto_ahash_setkey); static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen) @@ -174,19 +182,209 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, return -ENOSYS; } +static inline unsigned int ahash_align_buffer_size(unsigned len, + unsigned long mask) +{ + return len + (mask & ~(crypto_tfm_ctx_alignment() - 1)); +} + +static void ahash_op_unaligned_finish(struct ahash_request *req, int err) +{ + struct ahash_request_priv *priv = req->priv; + + if (err == -EINPROGRESS) + return; + + if (!err) + memcpy(priv->result, req->result, + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); + + kzfree(priv); +} + +static void ahash_op_unaligned_done(struct crypto_async_request *req, int err) +{ + struct ahash_request *areq = req->data; + struct ahash_request_priv *priv = areq->priv; + crypto_completion_t complete = priv->complete; + void *data = priv->data; + + ahash_op_unaligned_finish(areq, err); + + complete(data, err); +} + +static int ahash_op_unaligned(struct ahash_request *req, + int (*op)(struct ahash_request *)) +{ + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + unsigned long alignmask = crypto_ahash_alignmask(tfm); + unsigned int ds = crypto_ahash_digestsize(tfm); + struct ahash_request_priv *priv; + int err; + + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 
+ GFP_ATOMIC : GFP_ATOMIC); + if (!priv) + return -ENOMEM; + + priv->result = req->result; + priv->complete = req->base.complete; + priv->data = req->base.data; + + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); + req->base.complete = ahash_op_unaligned_done; + req->base.data = req; + req->priv = priv; + + err = op(req); + ahash_op_unaligned_finish(req, err); + + return err; +} + +static int crypto_ahash_op(struct ahash_request *req, + int (*op)(struct ahash_request *)) +{ + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + unsigned long alignmask = crypto_ahash_alignmask(tfm); + + if ((unsigned long)req->result & alignmask) + return ahash_op_unaligned(req, op); + + return op(req); +} + +int crypto_ahash_final(struct ahash_request *req) +{ + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final); +} +EXPORT_SYMBOL_GPL(crypto_ahash_final); + +int crypto_ahash_finup(struct ahash_request *req) +{ + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup); +} +EXPORT_SYMBOL_GPL(crypto_ahash_finup); + +int crypto_ahash_digest(struct ahash_request *req) +{ + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest); +} +EXPORT_SYMBOL_GPL(crypto_ahash_digest); + +static void ahash_def_finup_finish2(struct ahash_request *req, int err) +{ + struct ahash_request_priv *priv = req->priv; + + if (err == -EINPROGRESS) + return; + + if (!err) + memcpy(priv->result, req->result, + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); + + kzfree(priv); +} + +static void ahash_def_finup_done2(struct crypto_async_request *req, int err) +{ + struct ahash_request *areq = req->data; + struct ahash_request_priv *priv = areq->priv; + crypto_completion_t complete = priv->complete; + void *data = priv->data; + + ahash_def_finup_finish2(areq, err); + + complete(data, err); +} + +static int ahash_def_finup_finish1(struct ahash_request *req, int err) +{ + if (err) + goto out; + + req->base.complete = ahash_def_finup_done2; + req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + err = crypto_ahash_reqtfm(req)->final(req); + +out: + ahash_def_finup_finish2(req, err); + return err; +} + +static void ahash_def_finup_done1(struct crypto_async_request *req, int err) +{ + struct ahash_request *areq = req->data; + struct ahash_request_priv *priv = areq->priv; + crypto_completion_t complete = priv->complete; + void *data = priv->data; + + err = ahash_def_finup_finish1(areq, err); + + complete(data, err); +} + +static int ahash_def_finup(struct ahash_request *req) +{ + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + unsigned long alignmask = crypto_ahash_alignmask(tfm); + unsigned int ds = crypto_ahash_digestsize(tfm); + struct ahash_request_priv *priv; + + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 
+ GFP_ATOMIC : GFP_ATOMIC); + if (!priv) + return -ENOMEM; + + priv->result = req->result; + priv->complete = req->base.complete; + priv->data = req->base.data; + + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); + req->base.complete = ahash_def_finup_done1; + req->base.data = req; + req->priv = priv; + + return ahash_def_finup_finish1(req, tfm->update(req)); +} + +static int ahash_no_export(struct ahash_request *req, void *out) +{ + return -ENOSYS; +} + +static int ahash_no_import(struct ahash_request *req, const void *in) +{ + return -ENOSYS; +} + static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) { struct crypto_ahash *hash = __crypto_ahash_cast(tfm); struct ahash_alg *alg = crypto_ahash_alg(hash); + hash->setkey = ahash_nosetkey; + hash->export = ahash_no_export; + hash->import = ahash_no_import; + if (tfm->__crt_alg->cra_type != &crypto_ahash_type) return crypto_init_shash_ops_async(tfm); hash->init = alg->init; hash->update = alg->update; - hash->final = alg->final; + hash->final = alg->final; + hash->finup = alg->finup ?: ahash_def_finup; hash->digest = alg->digest; - hash->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; + + if (alg->setkey) + hash->setkey = alg->setkey; + if (alg->export) + hash->export = alg->export; + if (alg->import) + hash->import = alg->import; return 0; } diff --git a/crypto/shash.c b/crypto/shash.c index 171c8f052f89..834d9d24cdae 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -235,6 +235,33 @@ static int shash_async_final(struct ahash_request *req) return crypto_shash_final(ahash_request_ctx(req), req->result); } +int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) +{ + struct crypto_hash_walk walk; + int nbytes; + + for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; + nbytes = crypto_hash_walk_done(&walk, nbytes)) + nbytes = crypto_hash_walk_last(&walk) ? 
+ crypto_shash_finup(desc, walk.data, nbytes, + req->result) : + crypto_shash_update(desc, walk.data, nbytes); + + return nbytes; +} +EXPORT_SYMBOL_GPL(shash_ahash_finup); + +static int shash_async_finup(struct ahash_request *req) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); + struct shash_desc *desc = ahash_request_ctx(req); + + desc->tfm = *ctx; + desc->flags = req->base.flags; + + return shash_ahash_finup(req, desc); +} + int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) { struct scatterlist *sg = req->src; @@ -252,8 +279,7 @@ int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) crypto_yield(desc->flags); } else err = crypto_shash_init(desc) ?: - shash_ahash_update(req, desc) ?: - crypto_shash_final(desc, req->result); + shash_ahash_finup(req, desc); return err; } @@ -270,6 +296,16 @@ static int shash_async_digest(struct ahash_request *req) return shash_ahash_digest(req, desc); } +static int shash_async_export(struct ahash_request *req, void *out) +{ + return crypto_shash_export(ahash_request_ctx(req), out); +} + +static int shash_async_import(struct ahash_request *req, const void *in) +{ + return crypto_shash_import(ahash_request_ctx(req), in); +} + static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) { struct crypto_shash **ctx = crypto_tfm_ctx(tfm); @@ -280,6 +316,7 @@ static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) int crypto_init_shash_ops_async(struct crypto_tfm *tfm) { struct crypto_alg *calg = tfm->__crt_alg; + struct shash_alg *alg = __crypto_shash_alg(calg); struct crypto_ahash *crt = __crypto_ahash_cast(tfm); struct crypto_shash **ctx = crypto_tfm_ctx(tfm); struct crypto_shash *shash; @@ -298,9 +335,16 @@ int crypto_init_shash_ops_async(struct crypto_tfm *tfm) crt->init = shash_async_init; crt->update = shash_async_update; - crt->final = shash_async_final; + crt->final = shash_async_final; + crt->finup = shash_async_finup; crt->digest = shash_async_digest; - crt->setkey = shash_async_setkey; + + if (alg->setkey) + crt->setkey = shash_async_setkey; + if (alg->export) + crt->export = shash_async_export; + if (alg->setkey) + crt->import = shash_async_import; crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); diff --git a/include/crypto/hash.h b/include/crypto/hash.h index 45c2bddfdf32..3e89ce16b59c 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -31,6 +31,9 @@ struct ahash_request { struct scatterlist *src; u8 *result; + /* This field may only be used by the ahash API code. 
*/ + void *priv; + void *__ctx[] CRYPTO_MINALIGN_ATTR; }; @@ -175,16 +178,11 @@ static inline void *ahash_request_ctx(struct ahash_request *req) return req->__ctx; } -static inline int crypto_ahash_setkey(struct crypto_ahash *tfm, - const u8 *key, unsigned int keylen) -{ - return tfm->setkey(tfm, key, keylen); -} - -static inline int crypto_ahash_digest(struct ahash_request *req) -{ - return crypto_ahash_reqtfm(req)->digest(req); -} +int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen); +int crypto_ahash_finup(struct ahash_request *req); +int crypto_ahash_final(struct ahash_request *req); +int crypto_ahash_digest(struct ahash_request *req); static inline int crypto_ahash_export(struct ahash_request *req, void *out) { @@ -206,11 +204,6 @@ static inline int crypto_ahash_update(struct ahash_request *req) return crypto_ahash_reqtfm(req)->update(req); } -static inline int crypto_ahash_final(struct ahash_request *req) -{ - return crypto_ahash_reqtfm(req)->final(req); -} - static inline void ahash_request_set_tfm(struct ahash_request *req, struct crypto_ahash *tfm) { diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 179dd8fdfa14..5bfad8c80595 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -59,6 +59,11 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, struct crypto_hash_walk *walk, struct scatterlist *sg, unsigned int len); +static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk) +{ + return !(walk->entrylen | walk->total); +} + int crypto_register_ahash(struct ahash_alg *alg); int crypto_unregister_ahash(struct ahash_alg *alg); int ahash_register_instance(struct crypto_template *tmpl, @@ -94,6 +99,7 @@ static inline void crypto_drop_shash(struct crypto_shash_spawn *spawn) struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc); +int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc); int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc); int crypto_init_shash_ops_async(struct crypto_tfm *tfm); -- cgit v1.2.3-58-ga151 From fa64966473830219fe74952029ddb0e981a87749 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 15 Jul 2009 21:16:05 +0800 Subject: crypto: shash - Fix digest size offset When an shash algorithm is exported as ahash, ahash will access its digest size through hash_alg_common. That's why the shash layout needs to match hash_alg_common. This wasn't the case because the alignment weren't identical. This patch fixes the problem. Signed-off-by: Herbert Xu --- include/crypto/hash.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'include/crypto') diff --git a/include/crypto/hash.h b/include/crypto/hash.h index 3e89ce16b59c..26cb1eb16f4c 100644 --- a/include/crypto/hash.h +++ b/include/crypto/hash.h @@ -75,7 +75,8 @@ struct shash_alg { unsigned int descsize; /* These fields must match hash_alg_common. 
*/ - unsigned int digestsize; + unsigned int digestsize + __attribute__ ((aligned(__alignof__(struct hash_alg_common)))); unsigned int statesize; struct crypto_alg base; -- cgit v1.2.3-58-ga151 From 1f38ad8389bbca038d320c29d30aa1d6ed96b48d Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 22 Jul 2009 11:48:18 +0800 Subject: crypto: sha512 - Export struct sha512_state This patch renames struct sha512_ctx and exports it as struct sha512_state so that other sha512 implementations can use it as the reference structure for exporting their state. Signed-off-by: Herbert Xu --- crypto/sha512_generic.c | 20 +++++++------------- include/crypto/sha.h | 6 ++++++ 2 files changed, 13 insertions(+), 13 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c index 3bea38d12242..4fe95eb03226 100644 --- a/crypto/sha512_generic.c +++ b/crypto/sha512_generic.c @@ -21,12 +21,6 @@ #include #include -struct sha512_ctx { - u64 state[8]; - u32 count[4]; - u8 buf[128]; -}; - static DEFINE_PER_CPU(u64[80], msg_schedule); static inline u64 Ch(u64 x, u64 y, u64 z) @@ -141,7 +135,7 @@ sha512_transform(u64 *state, const u8 *input) static int sha512_init(struct shash_desc *desc) { - struct sha512_ctx *sctx = shash_desc_ctx(desc); + struct sha512_state *sctx = shash_desc_ctx(desc); sctx->state[0] = SHA512_H0; sctx->state[1] = SHA512_H1; sctx->state[2] = SHA512_H2; @@ -158,7 +152,7 @@ sha512_init(struct shash_desc *desc) static int sha384_init(struct shash_desc *desc) { - struct sha512_ctx *sctx = shash_desc_ctx(desc); + struct sha512_state *sctx = shash_desc_ctx(desc); sctx->state[0] = SHA384_H0; sctx->state[1] = SHA384_H1; sctx->state[2] = SHA384_H2; @@ -175,7 +169,7 @@ sha384_init(struct shash_desc *desc) static int sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) { - struct sha512_ctx *sctx = shash_desc_ctx(desc); + struct sha512_state *sctx = shash_desc_ctx(desc); unsigned int i, index, part_len; @@ -214,7 +208,7 @@ sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) static int sha512_final(struct shash_desc *desc, u8 *hash) { - struct sha512_ctx *sctx = shash_desc_ctx(desc); + struct sha512_state *sctx = shash_desc_ctx(desc); static u8 padding[128] = { 0x80, }; __be64 *dst = (__be64 *)hash; __be32 bits[4]; @@ -240,7 +234,7 @@ sha512_final(struct shash_desc *desc, u8 *hash) dst[i] = cpu_to_be64(sctx->state[i]); /* Zeroize sensitive information. 
*/ - memset(sctx, 0, sizeof(struct sha512_ctx)); + memset(sctx, 0, sizeof(struct sha512_state)); return 0; } @@ -262,7 +256,7 @@ static struct shash_alg sha512 = { .init = sha512_init, .update = sha512_update, .final = sha512_final, - .descsize = sizeof(struct sha512_ctx), + .descsize = sizeof(struct sha512_state), .base = { .cra_name = "sha512", .cra_flags = CRYPTO_ALG_TYPE_SHASH, @@ -276,7 +270,7 @@ static struct shash_alg sha384 = { .init = sha384_init, .update = sha512_update, .final = sha384_final, - .descsize = sizeof(struct sha512_ctx), + .descsize = sizeof(struct sha512_state), .base = { .cra_name = "sha384", .cra_flags = CRYPTO_ALG_TYPE_SHASH, diff --git a/include/crypto/sha.h b/include/crypto/sha.h index 88ef5eb9514d..45b25ccf7cc6 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -76,4 +76,10 @@ struct sha256_state { u8 buf[SHA256_BLOCK_SIZE]; }; +struct sha512_state { + u64 state[8]; + u32 count[4]; + u8 buf[128]; +}; + #endif -- cgit v1.2.3-58-ga151 From 13887ed6888dad1608eb9530ebd83b6ba29db577 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 22 Jul 2009 12:22:43 +0800 Subject: crypto: sha512_generic - Use 64-bit counters This patch replaces the 32-bit counters in sha512_generic with 64-bit counters. It also switches the bit count to the simpler byte count. Signed-off-by: Herbert Xu --- crypto/sha512_generic.c | 28 +++++++++++----------------- include/crypto/sha.h | 6 +++--- 2 files changed, 14 insertions(+), 20 deletions(-) (limited to 'include/crypto') diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c index 4fe95eb03226..9ed9f60316e5 100644 --- a/crypto/sha512_generic.c +++ b/crypto/sha512_generic.c @@ -144,7 +144,7 @@ sha512_init(struct shash_desc *desc) sctx->state[5] = SHA512_H5; sctx->state[6] = SHA512_H6; sctx->state[7] = SHA512_H7; - sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; + sctx->count[0] = sctx->count[1] = 0; return 0; } @@ -161,7 +161,7 @@ sha384_init(struct shash_desc *desc) sctx->state[5] = SHA384_H5; sctx->state[6] = SHA384_H6; sctx->state[7] = SHA384_H7; - sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; + sctx->count[0] = sctx->count[1] = 0; return 0; } @@ -174,15 +174,11 @@ sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) unsigned int i, index, part_len; /* Compute number of bytes mod 128 */ - index = (unsigned int)((sctx->count[0] >> 3) & 0x7F); - - /* Update number of bits */ - if ((sctx->count[0] += (len << 3)) < (len << 3)) { - if ((sctx->count[1] += 1) < 1) - if ((sctx->count[2] += 1) < 1) - sctx->count[3]++; - sctx->count[1] += (len >> 29); - } + index = sctx->count[0] & 0x7f; + + /* Update number of bytes */ + if (!(sctx->count[0] += len)) + sctx->count[1]++; part_len = 128 - index; @@ -211,18 +207,16 @@ sha512_final(struct shash_desc *desc, u8 *hash) struct sha512_state *sctx = shash_desc_ctx(desc); static u8 padding[128] = { 0x80, }; __be64 *dst = (__be64 *)hash; - __be32 bits[4]; + __be64 bits[2]; unsigned int index, pad_len; int i; /* Save number of bits */ - bits[3] = cpu_to_be32(sctx->count[0]); - bits[2] = cpu_to_be32(sctx->count[1]); - bits[1] = cpu_to_be32(sctx->count[2]); - bits[0] = cpu_to_be32(sctx->count[3]); + bits[1] = cpu_to_be64(sctx->count[0] << 3); + bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61); /* Pad out to 112 mod 128. */ - index = (sctx->count[0] >> 3) & 0x7f; + index = sctx->count[0] & 0x7f; pad_len = (index < 112) ? 
(112 - index) : ((128+112) - index); sha512_update(desc, padding, pad_len); diff --git a/include/crypto/sha.h b/include/crypto/sha.h index 45b25ccf7cc6..069e85ba97e1 100644 --- a/include/crypto/sha.h +++ b/include/crypto/sha.h @@ -77,9 +77,9 @@ struct sha256_state { }; struct sha512_state { - u64 state[8]; - u32 count[4]; - u8 buf[128]; + u64 count[2]; + u64 state[SHA512_DIGEST_SIZE / 8]; + u8 buf[SHA512_BLOCK_SIZE]; }; #endif -- cgit v1.2.3-58-ga151 From ab300465676b0c0559af62d57ec9a902f5680b03 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Fri, 24 Jul 2009 15:26:15 +0800 Subject: crypto: api - Fix aligned ctx helper The aligned ctx helper was using a bogus alignment value that was one off the correct value. Fortunately the current users do not require anything beyond the natural alignment of the platform, so this hasn't caused a problem. This patch fixes that and also removes the unnecessary minimum check, since if the alignment is less than the natural alignment the subsequent ALIGN operation is a no-op. Signed-off-by: Herbert Xu --- include/crypto/algapi.h | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) (limited to 'include/crypto') diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h index 9de6c38f4069..3f388146a914 100644 --- a/include/crypto/algapi.h +++ b/include/crypto/algapi.h @@ -175,12 +175,8 @@ int blkcipher_walk_virt_block(struct blkcipher_desc *desc, static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm) { - unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm); - unsigned long align = crypto_tfm_alg_alignmask(tfm); - - if (align <= crypto_tfm_ctx_alignment()) - align = 1; - return (void *)ALIGN(addr, align); + return PTR_ALIGN(crypto_tfm_ctx(tfm), + crypto_tfm_alg_alignmask(tfm) + 1); } static inline struct crypto_instance *crypto_tfm_alg_instance( -- cgit v1.2.3-58-ga151 From ace1366369841c9c3a9788f79baa4d73f1c53107 Mon Sep 17 00:00:00 2001 From: Huang Ying Date: Thu, 6 Aug 2009 15:35:20 +1000 Subject: crypto: cryptd - Add support to access underlying shash cryptd_alloc_ahash() will allocate a cryptd-ed ahash for the specified algorithm name. The newly allocated tfm is guaranteed to be a cryptd-ed ahash, so the underlying shash can be obtained via cryptd_ahash_child().
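A short usage sketch of the new helpers: allocate the cryptd-wrapped ahash once, keep the underlying shash for a synchronous fast path, and submit ahash requests to the cryptd tfm for the asynchronous path. The context layout, helper names and the "sha1" algorithm below are purely illustrative.

struct example_ctx {
	struct cryptd_ahash *cryptd_tfm;
	struct crypto_shash *child;
};

static int example_init_ctx(struct example_ctx *ctx)
{
	struct cryptd_ahash *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ahash("sha1", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	/* Fast path: drive the underlying shash directly. */
	ctx->child = cryptd_ahash_child(cryptd_tfm);
	/* Slow path: queue ahash requests on &cryptd_tfm->base. */
	return 0;
}

static void example_exit_ctx(struct example_ctx *ctx)
{
	cryptd_free_ahash(ctx->cryptd_tfm);
}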
Signed-off-by: Huang Ying Signed-off-by: Herbert Xu --- crypto/cryptd.c | 35 +++++++++++++++++++++++++++++++++++ include/crypto/cryptd.h | 17 +++++++++++++++++ 2 files changed, 52 insertions(+) (limited to 'include/crypto') diff --git a/crypto/cryptd.c b/crypto/cryptd.c index 2eb705822f2b..35335825a4ef 100644 --- a/crypto/cryptd.c +++ b/crypto/cryptd.c @@ -682,6 +682,41 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm) } EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher); +struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name, + u32 type, u32 mask) +{ + char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; + struct crypto_ahash *tfm; + + if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, + "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-EINVAL); + tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask); + if (IS_ERR(tfm)) + return ERR_CAST(tfm); + if (tfm->base.__crt_alg->cra_module != THIS_MODULE) { + crypto_free_ahash(tfm); + return ERR_PTR(-EINVAL); + } + + return __cryptd_ahash_cast(tfm); +} +EXPORT_SYMBOL_GPL(cryptd_alloc_ahash); + +struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm) +{ + struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); + + return ctx->child; +} +EXPORT_SYMBOL_GPL(cryptd_ahash_child); + +void cryptd_free_ahash(struct cryptd_ahash *tfm) +{ + crypto_free_ahash(&tfm->base); +} +EXPORT_SYMBOL_GPL(cryptd_free_ahash); + static int __init cryptd_init(void) { int err; diff --git a/include/crypto/cryptd.h b/include/crypto/cryptd.h index 55fa7bbdbc71..2f65a6e8ea4d 100644 --- a/include/crypto/cryptd.h +++ b/include/crypto/cryptd.h @@ -7,6 +7,7 @@ #include #include +#include struct cryptd_ablkcipher { struct crypto_ablkcipher base; @@ -24,4 +25,20 @@ struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name, struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm); void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm); +struct cryptd_ahash { + struct crypto_ahash base; +}; + +static inline struct cryptd_ahash *__cryptd_ahash_cast( + struct crypto_ahash *tfm) +{ + return (struct cryptd_ahash *)tfm; +} + +/* alg_name should be algorithm to be cryptd-ed */ +struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name, + u32 type, u32 mask); +struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm); +void cryptd_free_ahash(struct cryptd_ahash *tfm); + #endif -- cgit v1.2.3-58-ga151 From f1939f7c56456d22a559d2c75156e91912a2e97e Mon Sep 17 00:00:00 2001 From: Shane Wang Date: Wed, 2 Sep 2009 20:05:22 +1000 Subject: crypto: vmac - New hash algorithm for intel_txt support This patch adds VMAC (a fast MAC) support into crypto framework. 
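To make the new algorithm concrete, here is a sketch of computing a 64-bit VMAC tag over a flat buffer through the shash interface. The 16-byte AES key length and 8-byte tag match the test vector added below; the helper name, the kmalloc-ed descriptor and the assumption that "vmac(aes)" is reachable as a shash are illustrative choices, not something this patch mandates.

static int example_vmac_aes(const u8 key[16], const u8 *data,
			    unsigned int len, u8 tag[8])
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("vmac(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, 16);
	if (err)
		goto out_free_tfm;

	/* Descriptor needs room for the transform's per-request state. */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	desc->tfm = tfm;
	desc->flags = 0;
	err = crypto_shash_digest(desc, data, len, tag);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}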
Signed-off-by: Shane Wang Signed-off-by: Joseph Cihula Signed-off-by: Herbert Xu --- crypto/Kconfig | 12 + crypto/Makefile | 1 + crypto/tcrypt.c | 4 + crypto/testmgr.c | 9 + crypto/testmgr.h | 16 ++ crypto/vmac.c | 678 ++++++++++++++++++++++++++++++++++++++++++++++++++ include/crypto/vmac.h | 61 +++++ 7 files changed, 781 insertions(+) create mode 100644 crypto/vmac.c create mode 100644 include/crypto/vmac.h (limited to 'include/crypto') diff --git a/crypto/Kconfig b/crypto/Kconfig index 762344202725..26b5dd0cb564 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -269,6 +269,18 @@ config CRYPTO_XCBC http://csrc.nist.gov/encryption/modes/proposedmodes/ xcbc-mac/xcbc-mac-spec.pdf +config CRYPTO_VMAC + tristate "VMAC support" + depends on EXPERIMENTAL + select CRYPTO_HASH + select CRYPTO_MANAGER + help + VMAC is a message authentication algorithm designed for + very high speed on 64-bit architectures. + + See also: + + comment "Digest" config CRYPTO_CRC32C diff --git a/crypto/Makefile b/crypto/Makefile index c2ca721eea9d..9e8f61908cb5 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -32,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o obj-$(CONFIG_CRYPTO_HMAC) += hmac.o +obj-$(CONFIG_CRYPTO_VMAC) += vmac.o obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o obj-$(CONFIG_CRYPTO_MD4) += md4.o diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 5a375e819d5d..aa3f84ccc786 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -719,6 +719,10 @@ static int do_test(int m) ret += tcrypt_test("hmac(rmd160)"); break; + case 109: + ret += tcrypt_test("vmac(aes)"); + break; + case 150: ret += tcrypt_test("ansi_cprng"); break; diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 29b228d9b1a2..6d5b746637be 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -2247,6 +2247,15 @@ static const struct alg_test_desc alg_test_descs[] = { .count = TGR192_TEST_VECTORS } } + }, { + .alg = "vmac(aes)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = aes_vmac128_tv_template, + .count = VMAC_AES_TEST_VECTORS + } + } }, { .alg = "wp256", .test = alg_test_hash, diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 69316228fc19..9963b18983ab 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -1654,6 +1654,22 @@ static struct hash_testvec aes_xcbc128_tv_template[] = { } }; +#define VMAC_AES_TEST_VECTORS 1 +static char vmac_string[128] = {'\x01', '\x01', '\x01', '\x01', + '\x02', '\x03', '\x02', '\x02', + '\x02', '\x04', '\x01', '\x07', + '\x04', '\x01', '\x04', '\x03',}; +static struct hash_testvec aes_vmac128_tv_template[] = { + { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = vmac_string, + .digest = "\xcb\xd7\x8a\xfd\xb7\x33\x79\xe7", + .psize = 128, + .ksize = 16, + }, +}; + /* * SHA384 HMAC test vectors from RFC4231 */ diff --git a/crypto/vmac.c b/crypto/vmac.c new file mode 100644 index 000000000000..0a9468e575de --- /dev/null +++ b/crypto/vmac.c @@ -0,0 +1,678 @@ +/* + * Modified to interface to the Linux kernel + * Copyright (c) 2009, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 59 Temple + * Place - Suite 330, Boston, MA 02111-1307 USA. + */ + +/* -------------------------------------------------------------------------- + * VMAC and VHASH Implementation by Ted Krovetz (tdk@acm.org) and Wei Dai. + * This implementation is herby placed in the public domain. + * The authors offers no warranty. Use at your own risk. + * Please send bug reports to the authors. + * Last modified: 17 APR 08, 1700 PDT + * ----------------------------------------------------------------------- */ + +#include +#include +#include +#include +#include +#include +#include +#include + +/* + * Constants and masks + */ +#define UINT64_C(x) x##ULL +const u64 p64 = UINT64_C(0xfffffffffffffeff); /* 2^64 - 257 prime */ +const u64 m62 = UINT64_C(0x3fffffffffffffff); /* 62-bit mask */ +const u64 m63 = UINT64_C(0x7fffffffffffffff); /* 63-bit mask */ +const u64 m64 = UINT64_C(0xffffffffffffffff); /* 64-bit mask */ +const u64 mpoly = UINT64_C(0x1fffffff1fffffff); /* Poly key mask */ + +#ifdef __LITTLE_ENDIAN +#define INDEX_HIGH 1 +#define INDEX_LOW 0 +#else +#define INDEX_HIGH 0 +#define INDEX_LOW 1 +#endif + +/* + * The following routines are used in this implementation. They are + * written via macros to simulate zero-overhead call-by-reference. + * + * MUL64: 64x64->128-bit multiplication + * PMUL64: assumes top bits cleared on inputs + * ADD128: 128x128->128-bit addition + */ + +#define ADD128(rh, rl, ih, il) \ + do { \ + u64 _il = (il); \ + (rl) += (_il); \ + if ((rl) < (_il)) \ + (rh)++; \ + (rh) += (ih); \ + } while (0) + +#define MUL32(i1, i2) ((u64)(u32)(i1)*(u32)(i2)) + +#define PMUL64(rh, rl, i1, i2) /* Assumes m doesn't overflow */ \ + do { \ + u64 _i1 = (i1), _i2 = (i2); \ + u64 m = MUL32(_i1, _i2>>32) + MUL32(_i1>>32, _i2); \ + rh = MUL32(_i1>>32, _i2>>32); \ + rl = MUL32(_i1, _i2); \ + ADD128(rh, rl, (m >> 32), (m << 32)); \ + } while (0) + +#define MUL64(rh, rl, i1, i2) \ + do { \ + u64 _i1 = (i1), _i2 = (i2); \ + u64 m1 = MUL32(_i1, _i2>>32); \ + u64 m2 = MUL32(_i1>>32, _i2); \ + rh = MUL32(_i1>>32, _i2>>32); \ + rl = MUL32(_i1, _i2); \ + ADD128(rh, rl, (m1 >> 32), (m1 << 32)); \ + ADD128(rh, rl, (m2 >> 32), (m2 << 32)); \ + } while (0) + +/* + * For highest performance the L1 NH and L2 polynomial hashes should be + * carefully implemented to take advantage of one's target architechture. + * Here these two hash functions are defined multiple time; once for + * 64-bit architectures, once for 32-bit SSE2 architectures, and once + * for the rest (32-bit) architectures. + * For each, nh_16 *must* be defined (works on multiples of 16 bytes). + * Optionally, nh_vmac_nhbytes can be defined (for multiples of + * VMAC_NHBYTES), and nh_16_2 and nh_vmac_nhbytes_2 (versions that do two + * NH computations at once). 
+ */ + +#ifdef CONFIG_64BIT + +#define nh_16(mp, kp, nw, rh, rl) \ + do { \ + int i; u64 th, tl; \ + rh = rl = 0; \ + for (i = 0; i < nw; i += 2) { \ + MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \ + le64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + } \ + } while (0) + +#define nh_16_2(mp, kp, nw, rh, rl, rh1, rl1) \ + do { \ + int i; u64 th, tl; \ + rh1 = rl1 = rh = rl = 0; \ + for (i = 0; i < nw; i += 2) { \ + MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \ + le64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i+2], \ + le64_to_cpup((mp)+i+1)+(kp)[i+3]); \ + ADD128(rh1, rl1, th, tl); \ + } \ + } while (0) + +#if (VMAC_NHBYTES >= 64) /* These versions do 64-bytes of message at a time */ +#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \ + do { \ + int i; u64 th, tl; \ + rh = rl = 0; \ + for (i = 0; i < nw; i += 8) { \ + MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \ + le64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+2)+(kp)[i+2], \ + le64_to_cpup((mp)+i+3)+(kp)[i+3]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+4)+(kp)[i+4], \ + le64_to_cpup((mp)+i+5)+(kp)[i+5]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+6)+(kp)[i+6], \ + le64_to_cpup((mp)+i+7)+(kp)[i+7]); \ + ADD128(rh, rl, th, tl); \ + } \ + } while (0) + +#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh1, rl1) \ + do { \ + int i; u64 th, tl; \ + rh1 = rl1 = rh = rl = 0; \ + for (i = 0; i < nw; i += 8) { \ + MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \ + le64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i+2], \ + le64_to_cpup((mp)+i+1)+(kp)[i+3]); \ + ADD128(rh1, rl1, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+2)+(kp)[i+2], \ + le64_to_cpup((mp)+i+3)+(kp)[i+3]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+2)+(kp)[i+4], \ + le64_to_cpup((mp)+i+3)+(kp)[i+5]); \ + ADD128(rh1, rl1, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+4)+(kp)[i+4], \ + le64_to_cpup((mp)+i+5)+(kp)[i+5]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+4)+(kp)[i+6], \ + le64_to_cpup((mp)+i+5)+(kp)[i+7]); \ + ADD128(rh1, rl1, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+6)+(kp)[i+6], \ + le64_to_cpup((mp)+i+7)+(kp)[i+7]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, le64_to_cpup((mp)+i+6)+(kp)[i+8], \ + le64_to_cpup((mp)+i+7)+(kp)[i+9]); \ + ADD128(rh1, rl1, th, tl); \ + } \ + } while (0) +#endif + +#define poly_step(ah, al, kh, kl, mh, ml) \ + do { \ + u64 t1h, t1l, t2h, t2l, t3h, t3l, z = 0; \ + /* compute ab*cd, put bd into result registers */ \ + PMUL64(t3h, t3l, al, kh); \ + PMUL64(t2h, t2l, ah, kl); \ + PMUL64(t1h, t1l, ah, 2*kh); \ + PMUL64(ah, al, al, kl); \ + /* add 2 * ac to result */ \ + ADD128(ah, al, t1h, t1l); \ + /* add together ad + bc */ \ + ADD128(t2h, t2l, t3h, t3l); \ + /* now (ah,al), (t2l,2*t2h) need summing */ \ + /* first add the high registers, carrying into t2h */ \ + ADD128(t2h, ah, z, t2l); \ + /* double t2h and add top bit of ah */ \ + t2h = 2 * t2h + (ah >> 63); \ + ah &= m63; \ + /* now add the low registers */ \ + ADD128(ah, al, mh, ml); \ + ADD128(ah, al, z, t2h); \ + } while (0) + +#else /* ! 
CONFIG_64BIT */ + +#ifndef nh_16 +#define nh_16(mp, kp, nw, rh, rl) \ + do { \ + u64 t1, t2, m1, m2, t; \ + int i; \ + rh = rl = t = 0; \ + for (i = 0; i < nw; i += 2) { \ + t1 = le64_to_cpup(mp+i) + kp[i]; \ + t2 = le64_to_cpup(mp+i+1) + kp[i+1]; \ + m2 = MUL32(t1 >> 32, t2); \ + m1 = MUL32(t1, t2 >> 32); \ + ADD128(rh, rl, MUL32(t1 >> 32, t2 >> 32), \ + MUL32(t1, t2)); \ + rh += (u64)(u32)(m1 >> 32) \ + + (u32)(m2 >> 32); \ + t += (u64)(u32)m1 + (u32)m2; \ + } \ + ADD128(rh, rl, (t >> 32), (t << 32)); \ + } while (0) +#endif + +static void poly_step_func(u64 *ahi, u64 *alo, + const u64 *kh, const u64 *kl, + const u64 *mh, const u64 *ml) +{ +#define a0 (*(((u32 *)alo)+INDEX_LOW)) +#define a1 (*(((u32 *)alo)+INDEX_HIGH)) +#define a2 (*(((u32 *)ahi)+INDEX_LOW)) +#define a3 (*(((u32 *)ahi)+INDEX_HIGH)) +#define k0 (*(((u32 *)kl)+INDEX_LOW)) +#define k1 (*(((u32 *)kl)+INDEX_HIGH)) +#define k2 (*(((u32 *)kh)+INDEX_LOW)) +#define k3 (*(((u32 *)kh)+INDEX_HIGH)) + + u64 p, q, t; + u32 t2; + + p = MUL32(a3, k3); + p += p; + p += *(u64 *)mh; + p += MUL32(a0, k2); + p += MUL32(a1, k1); + p += MUL32(a2, k0); + t = (u32)(p); + p >>= 32; + p += MUL32(a0, k3); + p += MUL32(a1, k2); + p += MUL32(a2, k1); + p += MUL32(a3, k0); + t |= ((u64)((u32)p & 0x7fffffff)) << 32; + p >>= 31; + p += (u64)(((u32 *)ml)[INDEX_LOW]); + p += MUL32(a0, k0); + q = MUL32(a1, k3); + q += MUL32(a2, k2); + q += MUL32(a3, k1); + q += q; + p += q; + t2 = (u32)(p); + p >>= 32; + p += (u64)(((u32 *)ml)[INDEX_HIGH]); + p += MUL32(a0, k1); + p += MUL32(a1, k0); + q = MUL32(a2, k3); + q += MUL32(a3, k2); + q += q; + p += q; + *(u64 *)(alo) = (p << 32) | t2; + p >>= 32; + *(u64 *)(ahi) = p + t; + +#undef a0 +#undef a1 +#undef a2 +#undef a3 +#undef k0 +#undef k1 +#undef k2 +#undef k3 +} + +#define poly_step(ah, al, kh, kl, mh, ml) \ + poly_step_func(&(ah), &(al), &(kh), &(kl), &(mh), &(ml)) + +#endif /* end of specialized NH and poly definitions */ + +/* At least nh_16 is defined. 
Defined others as needed here */ +#ifndef nh_16_2 +#define nh_16_2(mp, kp, nw, rh, rl, rh2, rl2) \ + do { \ + nh_16(mp, kp, nw, rh, rl); \ + nh_16(mp, ((kp)+2), nw, rh2, rl2); \ + } while (0) +#endif +#ifndef nh_vmac_nhbytes +#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \ + nh_16(mp, kp, nw, rh, rl) +#endif +#ifndef nh_vmac_nhbytes_2 +#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh2, rl2) \ + do { \ + nh_vmac_nhbytes(mp, kp, nw, rh, rl); \ + nh_vmac_nhbytes(mp, ((kp)+2), nw, rh2, rl2); \ + } while (0) +#endif + +static void vhash_abort(struct vmac_ctx *ctx) +{ + ctx->polytmp[0] = ctx->polykey[0] ; + ctx->polytmp[1] = ctx->polykey[1] ; + ctx->first_block_processed = 0; +} + +static u64 l3hash(u64 p1, u64 p2, + u64 k1, u64 k2, u64 len) +{ + u64 rh, rl, t, z = 0; + + /* fully reduce (p1,p2)+(len,0) mod p127 */ + t = p1 >> 63; + p1 &= m63; + ADD128(p1, p2, len, t); + /* At this point, (p1,p2) is at most 2^127+(len<<64) */ + t = (p1 > m63) + ((p1 == m63) && (p2 == m64)); + ADD128(p1, p2, z, t); + p1 &= m63; + + /* compute (p1,p2)/(2^64-2^32) and (p1,p2)%(2^64-2^32) */ + t = p1 + (p2 >> 32); + t += (t >> 32); + t += (u32)t > 0xfffffffeu; + p1 += (t >> 32); + p2 += (p1 << 32); + + /* compute (p1+k1)%p64 and (p2+k2)%p64 */ + p1 += k1; + p1 += (0 - (p1 < k1)) & 257; + p2 += k2; + p2 += (0 - (p2 < k2)) & 257; + + /* compute (p1+k1)*(p2+k2)%p64 */ + MUL64(rh, rl, p1, p2); + t = rh >> 56; + ADD128(t, rl, z, rh); + rh <<= 8; + ADD128(t, rl, z, rh); + t += t << 8; + rl += t; + rl += (0 - (rl < t)) & 257; + rl += (0 - (rl > p64-1)) & 257; + return rl; +} + +static void vhash_update(const unsigned char *m, + unsigned int mbytes, /* Pos multiple of VMAC_NHBYTES */ + struct vmac_ctx *ctx) +{ + u64 rh, rl, *mptr; + const u64 *kptr = (u64 *)ctx->nhkey; + int i; + u64 ch, cl; + u64 pkh = ctx->polykey[0]; + u64 pkl = ctx->polykey[1]; + + mptr = (u64 *)m; + i = mbytes / VMAC_NHBYTES; /* Must be non-zero */ + + ch = ctx->polytmp[0]; + cl = ctx->polytmp[1]; + + if (!ctx->first_block_processed) { + ctx->first_block_processed = 1; + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl); + rh &= m62; + ADD128(ch, cl, rh, rl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + i--; + } + + while (i--) { + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl); + rh &= m62; + poly_step(ch, cl, pkh, pkl, rh, rl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + } + + ctx->polytmp[0] = ch; + ctx->polytmp[1] = cl; +} + +static u64 vhash(unsigned char m[], unsigned int mbytes, + u64 *tagl, struct vmac_ctx *ctx) +{ + u64 rh, rl, *mptr; + const u64 *kptr = (u64 *)ctx->nhkey; + int i, remaining; + u64 ch, cl; + u64 pkh = ctx->polykey[0]; + u64 pkl = ctx->polykey[1]; + + mptr = (u64 *)m; + i = mbytes / VMAC_NHBYTES; + remaining = mbytes % VMAC_NHBYTES; + + if (ctx->first_block_processed) { + ch = ctx->polytmp[0]; + cl = ctx->polytmp[1]; + } else if (i) { + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, ch, cl); + ch &= m62; + ADD128(ch, cl, pkh, pkl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + i--; + } else if (remaining) { + nh_16(mptr, kptr, 2*((remaining+15)/16), ch, cl); + ch &= m62; + ADD128(ch, cl, pkh, pkl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + goto do_l3; + } else {/* Empty String */ + ch = pkh; cl = pkl; + goto do_l3; + } + + while (i--) { + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl); + rh &= m62; + poly_step(ch, cl, pkh, pkl, rh, rl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + } + if (remaining) { + nh_16(mptr, kptr, 2*((remaining+15)/16), rh, rl); + rh &= m62; + poly_step(ch, cl, pkh, pkl, rh, rl); + } + +do_l3: + vhash_abort(ctx); + 
remaining *= 8; + return l3hash(ch, cl, ctx->l3key[0], ctx->l3key[1], remaining); +} + +static u64 vmac(unsigned char m[], unsigned int mbytes, + unsigned char n[16], u64 *tagl, + struct vmac_ctx_t *ctx) +{ + u64 *in_n, *out_p; + u64 p, h; + int i; + + in_n = ctx->__vmac_ctx.cached_nonce; + out_p = ctx->__vmac_ctx.cached_aes; + + i = n[15] & 1; + if ((*(u64 *)(n+8) != in_n[1]) || (*(u64 *)(n) != in_n[0])) { + in_n[0] = *(u64 *)(n); + in_n[1] = *(u64 *)(n+8); + ((unsigned char *)in_n)[15] &= 0xFE; + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out_p, (unsigned char *)in_n); + + ((unsigned char *)in_n)[15] |= (unsigned char)(1-i); + } + p = be64_to_cpup(out_p + i); + h = vhash(m, mbytes, (u64 *)0, &ctx->__vmac_ctx); + return p + h; +} + +static int vmac_set_key(unsigned char user_key[], struct vmac_ctx_t *ctx) +{ + u64 in[2] = {0}, out[2]; + unsigned i; + int err = 0; + + err = crypto_cipher_setkey(ctx->child, user_key, VMAC_KEY_LEN); + if (err) + return err; + + /* Fill nh key */ + ((unsigned char *)in)[0] = 0x80; + for (i = 0; i < sizeof(ctx->__vmac_ctx.nhkey)/8; i += 2) { + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out, (unsigned char *)in); + ctx->__vmac_ctx.nhkey[i] = be64_to_cpup(out); + ctx->__vmac_ctx.nhkey[i+1] = be64_to_cpup(out+1); + ((unsigned char *)in)[15] += 1; + } + + /* Fill poly key */ + ((unsigned char *)in)[0] = 0xC0; + in[1] = 0; + for (i = 0; i < sizeof(ctx->__vmac_ctx.polykey)/8; i += 2) { + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out, (unsigned char *)in); + ctx->__vmac_ctx.polytmp[i] = + ctx->__vmac_ctx.polykey[i] = + be64_to_cpup(out) & mpoly; + ctx->__vmac_ctx.polytmp[i+1] = + ctx->__vmac_ctx.polykey[i+1] = + be64_to_cpup(out+1) & mpoly; + ((unsigned char *)in)[15] += 1; + } + + /* Fill ip key */ + ((unsigned char *)in)[0] = 0xE0; + in[1] = 0; + for (i = 0; i < sizeof(ctx->__vmac_ctx.l3key)/8; i += 2) { + do { + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out, (unsigned char *)in); + ctx->__vmac_ctx.l3key[i] = be64_to_cpup(out); + ctx->__vmac_ctx.l3key[i+1] = be64_to_cpup(out+1); + ((unsigned char *)in)[15] += 1; + } while (ctx->__vmac_ctx.l3key[i] >= p64 + || ctx->__vmac_ctx.l3key[i+1] >= p64); + } + + /* Invalidate nonce/aes cache and reset other elements */ + ctx->__vmac_ctx.cached_nonce[0] = (u64)-1; /* Ensure illegal nonce */ + ctx->__vmac_ctx.cached_nonce[1] = (u64)0; /* Ensure illegal nonce */ + ctx->__vmac_ctx.first_block_processed = 0; + + return err; +} + +static int vmac_setkey(struct crypto_shash *parent, + const u8 *key, unsigned int keylen) +{ + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + + if (keylen != VMAC_KEY_LEN) { + crypto_shash_set_flags(parent, CRYPTO_TFM_RES_BAD_KEY_LEN); + return -EINVAL; + } + + return vmac_set_key((u8 *)key, ctx); +} + +static int vmac_init(struct shash_desc *pdesc) +{ + struct crypto_shash *parent = pdesc->tfm; + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + + memset(&ctx->__vmac_ctx, 0, sizeof(struct vmac_ctx)); + return 0; +} + +static int vmac_update(struct shash_desc *pdesc, const u8 *p, + unsigned int len) +{ + struct crypto_shash *parent = pdesc->tfm; + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + + vhash_update(p, len, &ctx->__vmac_ctx); + + return 0; +} + +static int vmac_final(struct shash_desc *pdesc, u8 *out) +{ + struct crypto_shash *parent = pdesc->tfm; + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + vmac_t mac; + u8 nonce[16] = {}; + + mac = vmac(NULL, 0, nonce, NULL, ctx); + memcpy(out, &mac, sizeof(vmac_t)); + 
memset(&mac, 0, sizeof(vmac_t)); + memset(&ctx->__vmac_ctx, 0, sizeof(struct vmac_ctx)); + return 0; +} + +static int vmac_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cipher *cipher; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm); + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + return 0; +} + +static void vmac_exit_tfm(struct crypto_tfm *tfm) +{ + struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb) +{ + struct shash_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); + if (err) + return err; + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + inst = shash_alloc_instance("vmac", alg); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + err = crypto_init_spawn(shash_instance_ctx(inst), alg, + shash_crypto_instance(inst), + CRYPTO_ALG_TYPE_MASK); + if (err) + goto out_free_inst; + + inst->alg.base.cra_priority = alg->cra_priority; + inst->alg.base.cra_blocksize = alg->cra_blocksize; + inst->alg.base.cra_alignmask = alg->cra_alignmask; + + inst->alg.digestsize = sizeof(vmac_t); + inst->alg.base.cra_ctxsize = sizeof(struct vmac_ctx_t); + inst->alg.base.cra_init = vmac_init_tfm; + inst->alg.base.cra_exit = vmac_exit_tfm; + + inst->alg.init = vmac_init; + inst->alg.update = vmac_update; + inst->alg.final = vmac_final; + inst->alg.setkey = vmac_setkey; + + err = shash_register_instance(tmpl, inst); + if (err) { +out_free_inst: + shash_free_instance(shash_crypto_instance(inst)); + } + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static struct crypto_template vmac_tmpl = { + .name = "vmac", + .create = vmac_create, + .free = shash_free_instance, + .module = THIS_MODULE, +}; + +static int __init vmac_module_init(void) +{ + return crypto_register_template(&vmac_tmpl); +} + +static void __exit vmac_module_exit(void) +{ + crypto_unregister_template(&vmac_tmpl); +} + +module_init(vmac_module_init); +module_exit(vmac_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("VMAC hash algorithm"); + diff --git a/include/crypto/vmac.h b/include/crypto/vmac.h new file mode 100644 index 000000000000..c4467c55df1e --- /dev/null +++ b/include/crypto/vmac.h @@ -0,0 +1,61 @@ +/* + * Modified to interface to the Linux kernel + * Copyright (c) 2009, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 59 Temple + * Place - Suite 330, Boston, MA 02111-1307 USA. + */ + +#ifndef __CRYPTO_VMAC_H +#define __CRYPTO_VMAC_H + +/* -------------------------------------------------------------------------- + * VMAC and VHASH Implementation by Ted Krovetz (tdk@acm.org) and Wei Dai. 
+ * This implementation is hereby placed in the public domain. + * The authors offer no warranty. Use at your own risk. + * Please send bug reports to the authors. + * Last modified: 17 APR 08, 1700 PDT + * ----------------------------------------------------------------------- */ + +/* + * User definable settings. + */ +#define VMAC_TAG_LEN 64 +#define VMAC_KEY_SIZE 128 /* Must be 128, 192 or 256 */ +#define VMAC_KEY_LEN (VMAC_KEY_SIZE/8) +#define VMAC_NHBYTES 128 /* Must be 2^i for any 3 < i < 13; standard = 128 */ + +/* + * This implementation uses u32 and u64 as names for unsigned 32- + * and 64-bit integer types. These are defined in C99 stdint.h. The + * following may need adaptation if you are not running a C99 or + * Microsoft C environment. + */ +struct vmac_ctx { + u64 nhkey[(VMAC_NHBYTES/8)+2*(VMAC_TAG_LEN/64-1)]; + u64 polykey[2*VMAC_TAG_LEN/64]; + u64 l3key[2*VMAC_TAG_LEN/64]; + u64 polytmp[2*VMAC_TAG_LEN/64]; + u64 cached_nonce[2]; + u64 cached_aes[2]; + int first_block_processed; +}; + +typedef u64 vmac_t; + +struct vmac_ctx_t { + struct crypto_cipher *child; + struct vmac_ctx __vmac_ctx; +}; + +#endif /* __CRYPTO_VMAC_H */ -- cgit v1.2.3-58-ga151
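The MUL64, PMUL64 and ADD128 macros in crypto/vmac.c above assemble a 64x64->128-bit product from four 32-bit partial products, which is why MUL32 casts both operands down to 32 bits before widening. A minimal standalone sketch of the same decomposition, cross-checked against a compiler-provided 128-bit type (this assumes a GCC/Clang-style unsigned __int128 and is not part of the patch):

#include <stdint.h>
#include <stdio.h>

/* Same shape as the kernel macros, but on C99 uint64_t/uint32_t. */
#define ADD128(rh, rl, ih, il)				\
	do {						\
		uint64_t _il = (il);			\
		(rl) += (_il);				\
		if ((rl) < (_il))			\
			(rh)++;				\
		(rh) += (ih);				\
	} while (0)

#define MUL32(i1, i2) ((uint64_t)(uint32_t)(i1) * (uint32_t)(i2))

#define MUL64(rh, rl, i1, i2)					\
	do {							\
		uint64_t _i1 = (i1), _i2 = (i2);		\
		uint64_t m1 = MUL32(_i1, _i2 >> 32);		\
		uint64_t m2 = MUL32(_i1 >> 32, _i2);		\
		(rh) = MUL32(_i1 >> 32, _i2 >> 32);		\
		(rl) = MUL32(_i1, _i2);				\
		ADD128(rh, rl, (m1 >> 32), (m1 << 32));		\
		ADD128(rh, rl, (m2 >> 32), (m2 << 32));		\
	} while (0)

int main(void)
{
	uint64_t a = 0xfffffffffffffeffULL;	/* p64 from vmac.c */
	uint64_t b = 0x1fffffff1fffffffULL;	/* mpoly from vmac.c */
	uint64_t rh, rl;
	unsigned __int128 ref = (unsigned __int128)a * b;

	MUL64(rh, rl, a, b);
	printf("MUL64 %s the 128-bit reference\n",
	       (rh == (uint64_t)(ref >> 64) && rl == (uint64_t)ref) ?
	       "matches" : "does not match");
	return 0;
}

The kernel keeps these as macros rather than functions to simulate zero-overhead call-by-reference, as the comment block in vmac.c notes; the standalone version exists only to show the arithmetic.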
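With the template registered, callers reach the new algorithm through the generic hash API under the name "vmac(aes)". vmac_setkey() accepts exactly VMAC_KEY_LEN (16) bytes, the tag is sizeof(vmac_t) = 8 bytes, and, per the comment on vhash_update(), update lengths must be positive multiples of VMAC_NHBYTES (128). A hypothetical in-kernel caller might look like the sketch below (the function name and buffer handling are illustrative, not part of the patch):

#include <crypto/hash.h>
#include <crypto/vmac.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/string.h>

/* key: VMAC_KEY_LEN bytes; len: positive multiple of VMAC_NHBYTES; tag: 8 bytes. */
static int vmac_aes_digest_sketch(const u8 *key, const u8 *msg,
				  unsigned int len, u8 *tag)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	/* Instantiates the "vmac" template around the "aes" cipher. */
	tfm = crypto_alloc_shash("vmac(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, VMAC_KEY_LEN);
	if (err)
		goto out_free_tfm;

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;

	/* init/update/final in one call; writes the 64-bit tag to *tag. */
	err = crypto_shash_digest(desc, msg, len, tag);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}

The tcrypt hunk above wires the same algorithm name into the self-tests: with CONFIG_CRYPTO_VMAC enabled, mode 109 (for example "modprobe tcrypt mode=109") runs tcrypt_test("vmac(aes)"), which testmgr resolves to the single aes_vmac128_tv_template vector added in this patch.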