Mirror of https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/
crypto: ahash - Disable request chaining
Disable hash request chaining in case a driver that copies an
ahash_request object by hand accidentally triggers chaining.

Reported-by: Manorit Chawdhry <m-chawdhry@ti.com>
Fixes: f2ffe5a9183d ("crypto: hash - Add request chaining API")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Tested-by: Manorit Chawdhry <m-chawdhry@ti.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit b2e689baf2
parent 9ae0c92fec
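For context, here is a minimal sketch of the failure mode the commit message describes: a driver that copies an ahash_request object by hand also copies the request's base.list pointers. The helper below is purely illustrative; driver_copy_request and its re-init step are assumptions, not code from this patch or from the reporting driver.

#include <crypto/hash.h>
#include <linux/list.h>
#include <linux/string.h>

/*
 * Hypothetical driver helper, not part of this patch: a byte-wise copy
 * of an ahash_request duplicates req->base.list verbatim, so the copy's
 * list pointers still reference the original request's chain.  With
 * request chaining enabled, core code that walks base.list (as in the
 * hunks removed below) would then traverse entries the driver never
 * linked in.
 */
static void driver_copy_request(struct crypto_ahash *tfm,
				struct ahash_request *dst,
				const struct ahash_request *src)
{
	memcpy(dst, src, sizeof(*src) + crypto_ahash_reqsize(tfm));

	/* A safe by-hand copy would have to re-initialise the list head. */
	INIT_LIST_HEAD(&dst->base.list);
}

With this patch, ahash_request_chained() always returns false and ahash_request_free() no longer walks base.list, so a stale list head in a hand-copied request can no longer steer the core into another request's chain.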
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -315,16 +315,7 @@ EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
 
 static bool ahash_request_hasvirt(struct ahash_request *req)
 {
-	struct ahash_request *r2;
-
-	if (ahash_request_isvirt(req))
-		return true;
-
-	list_for_each_entry(r2, &req->base.list, base.list)
-		if (ahash_request_isvirt(r2))
-			return true;
-
-	return false;
+	return ahash_request_isvirt(req);
 }
 
 static int ahash_reqchain_virt(struct ahash_save_req_state *state,
@@ -472,7 +463,6 @@ static int ahash_do_req_chain(struct ahash_request *req,
 	bool update = op == crypto_ahash_alg(tfm)->update;
 	struct ahash_save_req_state *state;
 	struct ahash_save_req_state state0;
-	struct ahash_request *r2;
 	u8 *page = NULL;
 	int err;
 
@@ -509,7 +499,6 @@ static int ahash_do_req_chain(struct ahash_request *req,
 	state->offset = 0;
 	state->nbytes = 0;
 	INIT_LIST_HEAD(&state->head);
-	list_splice_init(&req->base.list, &state->head);
 
 	if (page)
 		sg_init_one(&state->sg, page, PAGE_SIZE);
@@ -540,9 +529,6 @@ out_free_page:
 
 out_set_chain:
 	req->base.err = err;
-	list_for_each_entry(r2, &req->base.list, base.list)
-		r2->base.err = err;
-
 	return err;
 }
 
@@ -551,19 +537,10 @@ int crypto_ahash_init(struct ahash_request *req)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
 	if (likely(tfm->using_shash)) {
-		struct ahash_request *r2;
 		int err;
 
 		err = crypto_shash_init(prepare_shash_desc(req, tfm));
 		req->base.err = err;
-
-		list_for_each_entry(r2, &req->base.list, base.list) {
-			struct shash_desc *desc;
-
-			desc = prepare_shash_desc(r2, tfm);
-			r2->base.err = crypto_shash_init(desc);
-		}
-
 		return err;
 	}
 
@@ -620,19 +597,10 @@ int crypto_ahash_update(struct ahash_request *req)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
 	if (likely(tfm->using_shash)) {
-		struct ahash_request *r2;
 		int err;
 
 		err = shash_ahash_update(req, ahash_request_ctx(req));
 		req->base.err = err;
-
-		list_for_each_entry(r2, &req->base.list, base.list) {
-			struct shash_desc *desc;
-
-			desc = ahash_request_ctx(r2);
-			r2->base.err = shash_ahash_update(r2, desc);
-		}
-
 		return err;
 	}
 
@@ -645,19 +613,10 @@ int crypto_ahash_final(struct ahash_request *req)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
 	if (likely(tfm->using_shash)) {
-		struct ahash_request *r2;
 		int err;
 
 		err = crypto_shash_final(ahash_request_ctx(req), req->result);
 		req->base.err = err;
-
-		list_for_each_entry(r2, &req->base.list, base.list) {
-			struct shash_desc *desc;
-
-			desc = ahash_request_ctx(r2);
-			r2->base.err = crypto_shash_final(desc, r2->result);
-		}
-
 		return err;
 	}
 
@@ -670,19 +629,10 @@ int crypto_ahash_finup(struct ahash_request *req)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
 	if (likely(tfm->using_shash)) {
-		struct ahash_request *r2;
 		int err;
 
 		err = shash_ahash_finup(req, ahash_request_ctx(req));
 		req->base.err = err;
-
-		list_for_each_entry(r2, &req->base.list, base.list) {
-			struct shash_desc *desc;
-
-			desc = ahash_request_ctx(r2);
-			r2->base.err = shash_ahash_finup(r2, desc);
-		}
-
 		return err;
 	}
 
@@ -757,19 +707,10 @@ int crypto_ahash_digest(struct ahash_request *req)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
 	if (likely(tfm->using_shash)) {
-		struct ahash_request *r2;
 		int err;
 
 		err = shash_ahash_digest(req, prepare_shash_desc(req, tfm));
 		req->base.err = err;
-
-		list_for_each_entry(r2, &req->base.list, base.list) {
-			struct shash_desc *desc;
-
-			desc = prepare_shash_desc(r2, tfm);
-			r2->base.err = shash_ahash_digest(r2, desc);
-		}
-
 		return err;
 	}
 
@@ -1133,20 +1074,5 @@ int ahash_register_instance(struct crypto_template *tmpl,
 }
 EXPORT_SYMBOL_GPL(ahash_register_instance);
 
-void ahash_request_free(struct ahash_request *req)
-{
-	struct ahash_request *tmp;
-	struct ahash_request *r2;
-
-	if (unlikely(!req))
-		return;
-
-	list_for_each_entry_safe(r2, tmp, &req->base.list, base.list)
-		kfree_sensitive(r2);
-
-	kfree_sensitive(req);
-}
-EXPORT_SYMBOL_GPL(ahash_request_free);
-
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
--- a/include/crypto/hash.h
+++ b/include/crypto/hash.h
@@ -10,6 +10,7 @@
 
 #include <linux/atomic.h>
 #include <linux/crypto.h>
+#include <linux/slab.h>
 #include <linux/string.h>
 
 /* Set this bit for virtual address instead of SG list. */
@@ -581,7 +582,10 @@ static inline struct ahash_request *ahash_request_alloc_noprof(
  * ahash_request_free() - zeroize and free the request data structure
  * @req: request data structure cipher handle to be freed
  */
-void ahash_request_free(struct ahash_request *req);
+static inline void ahash_request_free(struct ahash_request *req)
+{
+	kfree_sensitive(req);
+}
 
 static inline struct ahash_request *ahash_request_cast(
 	struct crypto_async_request *req)
--- a/include/crypto/internal/hash.h
+++ b/include/crypto/internal/hash.h
@@ -249,7 +249,7 @@ static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
 
 static inline bool ahash_request_chained(struct ahash_request *req)
 {
-	return crypto_request_chained(&req->base);
+	return false;
 }
 
 static inline bool ahash_request_isvirt(struct ahash_request *req)