author    Antoine Tenart <antoine.tenart@bootlin.com>    2018-03-19 09:21:13 +0100
committer Herbert Xu <herbert@gondor.apana.org.au>       2018-03-31 01:32:42 +0800
commit    b869648c060fbb00bf6578d13cbe83e6f85914bc (patch)
tree      56c77e43aee7dd8737cb9b910f39bf12b18aac74
parent    e9acf05255cbf72dc32169957d2895cbfd12c7e1 (diff)
crypto: inside-secure - move the digest to the request context
This patch moves the digest information from the transformation context
to the request context. This fixes cases where the HMAC init functions
were called and overrode the digest value for a short period of time, as
the HMAC init functions call the SHA init ones, which reset the value.
This led to a small percentage of HMAC values being computed incorrectly
under heavy load.

Fixes: 1b44c5a60c13 ("crypto: inside-secure - add SafeXcel EIP197 crypto engine driver")
Suggested-by: Ofer Heifetz <oferh@marvell.com>
Signed-off-by: Antoine Tenart <antoine.tenart@bootlin.com>
[Ofer here did all the work, from seeing the issue to understanding the
root cause. I only made the patch.]
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--    drivers/crypto/inside-secure/safexcel_hash.c    30
1 file changed, 18 insertions(+), 12 deletions(-)
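The root cause is easier to see in isolation. Below is a minimal,
self-contained sketch (hypothetical names, not the driver's real
structures or API) of why a digest selector stored per request cannot be
clobbered by a concurrent init on the same transform, while one stored in
the shared transform context can:

/*
 * Illustrative only: the struct and function names are invented to
 * mirror the shape of the fix, not the driver's actual code.
 */
#include <stdio.h>

#define DIGEST_PRECOMPUTED 0x1
#define DIGEST_HMAC        0x2

/* Shared per-transform state: one instance serves many in-flight
 * requests. Before the fix the digest selector lived here, so a
 * concurrent hmac_init() -> sha_init() on the same transform could
 * briefly reset it while another request was being programmed into a
 * command descriptor. */
struct tfm_ctx {
	unsigned int alg;
};

/* Per-request state: private to each request, so no other request's
 * init path can overwrite it. */
struct req_ctx {
	unsigned int digest;
};

static void sha_init(struct req_ctx *req)
{
	req->digest = DIGEST_PRECOMPUTED;
}

static void hmac_init(struct req_ctx *req)
{
	sha_init(req);                 /* briefly sets PRECOMPUTED ...      */
	req->digest = DIGEST_HMAC;     /* ... then HMAC, but only for this
	                                * request's own context.            */
}

int main(void)
{
	struct req_ctx plain_req, hmac_req;

	sha_init(&plain_req);
	hmac_init(&hmac_req);          /* cannot clobber plain_req.digest   */

	printf("plain: %#x, hmac: %#x\n", plain_req.digest, hmac_req.digest);
	return 0;
}

With the selector in the request context, an HMAC init on one request can
no longer momentarily reset the value another request is about to write
into its descriptor, which is exactly what the hunks below implement by
moving the digest field from safexcel_ahash_ctx to safexcel_ahash_req.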
diff --git a/drivers/crypto/inside-secure/safexcel_hash.c b/drivers/crypto/inside-secure/safexcel_hash.c
index 6e9eb9bffec7..955b25ebffee 100644
--- a/drivers/crypto/inside-secure/safexcel_hash.c
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
@@ -21,7 +21,6 @@ struct safexcel_ahash_ctx {
struct safexcel_crypto_priv *priv;
u32 alg;
- u32 digest;
u32 ipad[SHA1_DIGEST_SIZE / sizeof(u32)];
u32 opad[SHA1_DIGEST_SIZE / sizeof(u32)];
@@ -36,6 +35,8 @@ struct safexcel_ahash_req {
int nents;
dma_addr_t result_dma;
+ u32 digest;
+
u8 state_sz; /* expected sate size, only set once */
u32 state[SHA256_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));
@@ -53,6 +54,8 @@ struct safexcel_ahash_export_state {
u64 len;
u64 processed;
+ u32 digest;
+
u32 state[SHA256_DIGEST_SIZE / sizeof(u32)];
u8 cache[SHA256_BLOCK_SIZE];
};
@@ -86,9 +89,9 @@ static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_OUT;
cdesc->control_data.control0 |= ctx->alg;
- cdesc->control_data.control0 |= ctx->digest;
+ cdesc->control_data.control0 |= req->digest;
- if (ctx->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) {
+ if (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) {
if (req->processed) {
if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(6);
@@ -116,7 +119,7 @@ static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
if (req->finish)
ctx->base.ctxr->data[i] = cpu_to_le32(req->processed / blocksize);
}
- } else if (ctx->digest == CONTEXT_CONTROL_DIGEST_HMAC) {
+ } else if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC) {
cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(10);
memcpy(ctx->base.ctxr->data, ctx->ipad, digestsize);
@@ -555,7 +558,7 @@ static int safexcel_ahash_enqueue(struct ahash_request *areq)
if (ctx->base.ctxr) {
if (priv->version == EIP197 &&
!ctx->base.needs_inv && req->processed &&
- ctx->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
+ req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
/* We're still setting needs_inv here, even though it is
* cleared right away, because the needs_inv flag can be
* set in other functions and we want to keep the same
@@ -590,7 +593,6 @@ static int safexcel_ahash_enqueue(struct ahash_request *areq)
static int safexcel_ahash_update(struct ahash_request *areq)
{
- struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
struct safexcel_ahash_req *req = ahash_request_ctx(areq);
struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
@@ -606,7 +608,7 @@ static int safexcel_ahash_update(struct ahash_request *areq)
* We're not doing partial updates when performing an hmac request.
* Everything will be handled by the final() call.
*/
- if (ctx->digest == CONTEXT_CONTROL_DIGEST_HMAC)
+ if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
return 0;
if (req->hmac)
@@ -665,6 +667,8 @@ static int safexcel_ahash_export(struct ahash_request *areq, void *out)
export->len = req->len;
export->processed = req->processed;
+ export->digest = req->digest;
+
memcpy(export->state, req->state, req->state_sz);
memcpy(export->cache, req->cache, crypto_ahash_blocksize(ahash));
@@ -685,6 +689,8 @@ static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
req->len = export->len;
req->processed = export->processed;
+ req->digest = export->digest;
+
memcpy(req->cache, export->cache, crypto_ahash_blocksize(ahash));
memcpy(req->state, export->state, req->state_sz);
@@ -721,7 +727,7 @@ static int safexcel_sha1_init(struct ahash_request *areq)
req->state[4] = SHA1_H4;
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
- ctx->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA1_DIGEST_SIZE;
return 0;
@@ -788,10 +794,10 @@ struct safexcel_alg_template safexcel_alg_sha1 = {
static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
- struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
safexcel_sha1_init(areq);
- ctx->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
return 0;
}
@@ -1029,7 +1035,7 @@ static int safexcel_sha256_init(struct ahash_request *areq)
req->state[7] = SHA256_H7;
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
- ctx->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA256_DIGEST_SIZE;
return 0;
@@ -1091,7 +1097,7 @@ static int safexcel_sha224_init(struct ahash_request *areq)
req->state[7] = SHA224_H7;
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
- ctx->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA256_DIGEST_SIZE;
return 0;