Skip to content

Commit f4f353c

Browse files
lin755 authored and herbertx committed
crypto: hisilicon/sec2 - fix for sec spec check
During encryption and decryption, user requests must be checked first; if specifications not supported by the hardware are used, software computing is used for processing instead.

Fixes: 2f072d7 ("crypto: hisilicon - Add aead support on SEC2")
Signed-off-by: Wenkai Lin <[email protected]>
Signed-off-by: Chenghai Huang <[email protected]>
Signed-off-by: Herbert Xu <[email protected]>
1 parent a49cc71 commit f4f353c

File tree

2 files changed

+39
-63
lines changed

2 files changed

+39
-63
lines changed

drivers/crypto/hisilicon/sec2/sec.h

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,6 @@ struct sec_aead_req {
3737
u8 *a_ivin;
3838
dma_addr_t a_ivin_dma;
3939
struct aead_request *aead_req;
40-
bool fallback;
4140
};
4241

4342
/* SEC request of Crypto */

drivers/crypto/hisilicon/sec2/sec_crypto.c

Lines changed: 39 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -690,14 +690,10 @@ static int sec_skcipher_fbtfm_init(struct crypto_skcipher *tfm)
690690

691691
c_ctx->fallback = false;
692692

693-
/* Currently, only XTS mode need fallback tfm when using 192bit key */
694-
if (likely(strncmp(alg, "xts", SEC_XTS_NAME_SZ)))
695-
return 0;
696-
697693
c_ctx->fbtfm = crypto_alloc_sync_skcipher(alg, 0,
698694
CRYPTO_ALG_NEED_FALLBACK);
699695
if (IS_ERR(c_ctx->fbtfm)) {
700-
pr_err("failed to alloc xts mode fallback tfm!\n");
696+
pr_err("failed to alloc fallback tfm for %s!\n", alg);
701697
return PTR_ERR(c_ctx->fbtfm);
702698
}
703699

@@ -857,7 +853,7 @@ static int sec_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
857853
}
858854

859855
memcpy(c_ctx->c_key, key, keylen);
860-
if (c_ctx->fallback && c_ctx->fbtfm) {
856+
if (c_ctx->fbtfm) {
861857
ret = crypto_sync_skcipher_setkey(c_ctx->fbtfm, key, keylen);
862858
if (ret) {
863859
dev_err(dev, "failed to set fallback skcipher key!\n");
@@ -1155,8 +1151,10 @@ static int sec_aead_setkey(struct crypto_aead *tfm, const u8 *key,
11551151
}
11561152

11571153
ret = crypto_authenc_extractkeys(&keys, key, keylen);
1158-
if (ret)
1154+
if (ret) {
1155+
dev_err(dev, "sec extract aead keys err!\n");
11591156
goto bad_key;
1157+
}
11601158

11611159
ret = sec_aead_aes_set_key(c_ctx, &keys);
11621160
if (ret) {
@@ -1170,12 +1168,6 @@ static int sec_aead_setkey(struct crypto_aead *tfm, const u8 *key,
11701168
goto bad_key;
11711169
}
11721170

1173-
if (ctx->a_ctx.a_key_len & WORD_MASK) {
1174-
ret = -EINVAL;
1175-
dev_err(dev, "AUTH key length error!\n");
1176-
goto bad_key;
1177-
}
1178-
11791171
ret = sec_aead_fallback_setkey(a_ctx, tfm, key, keylen);
11801172
if (ret) {
11811173
dev_err(dev, "set sec fallback key err!\n");
@@ -1995,8 +1987,7 @@ static int sec_aead_sha512_ctx_init(struct crypto_aead *tfm)
19951987
return sec_aead_ctx_init(tfm, "sha512");
19961988
}
19971989

1998-
static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx,
1999-
struct sec_req *sreq)
1990+
static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx, struct sec_req *sreq)
20001991
{
20011992
u32 cryptlen = sreq->c_req.sk_req->cryptlen;
20021993
struct device *dev = ctx->dev;
@@ -2018,10 +2009,6 @@ static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx,
20182009
}
20192010
break;
20202011
case SEC_CMODE_CTR:
2021-
if (unlikely(ctx->sec->qm.ver < QM_HW_V3)) {
2022-
dev_err(dev, "skcipher HW version error!\n");
2023-
ret = -EINVAL;
2024-
}
20252012
break;
20262013
default:
20272014
ret = -EINVAL;
@@ -2030,17 +2017,21 @@ static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx,
20302017
return ret;
20312018
}
20322019

2033-
static int sec_skcipher_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
2020+
static int sec_skcipher_param_check(struct sec_ctx *ctx,
2021+
struct sec_req *sreq, bool *need_fallback)
20342022
{
20352023
struct skcipher_request *sk_req = sreq->c_req.sk_req;
20362024
struct device *dev = ctx->dev;
20372025
u8 c_alg = ctx->c_ctx.c_alg;
20382026

2039-
if (unlikely(!sk_req->src || !sk_req->dst ||
2040-
sk_req->cryptlen > MAX_INPUT_DATA_LEN)) {
2027+
if (unlikely(!sk_req->src || !sk_req->dst)) {
20412028
dev_err(dev, "skcipher input param error!\n");
20422029
return -EINVAL;
20432030
}
2031+
2032+
if (sk_req->cryptlen > MAX_INPUT_DATA_LEN)
2033+
*need_fallback = true;
2034+
20442035
sreq->c_req.c_len = sk_req->cryptlen;
20452036

20462037
if (ctx->pbuf_supported && sk_req->cryptlen <= SEC_PBUF_SZ)
@@ -2098,6 +2089,7 @@ static int sec_skcipher_crypto(struct skcipher_request *sk_req, bool encrypt)
20982089
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(sk_req);
20992090
struct sec_req *req = skcipher_request_ctx(sk_req);
21002091
struct sec_ctx *ctx = crypto_skcipher_ctx(tfm);
2092+
bool need_fallback = false;
21012093
int ret;
21022094

21032095
if (!sk_req->cryptlen) {
@@ -2111,11 +2103,11 @@ static int sec_skcipher_crypto(struct skcipher_request *sk_req, bool encrypt)
21112103
req->c_req.encrypt = encrypt;
21122104
req->ctx = ctx;
21132105

2114-
ret = sec_skcipher_param_check(ctx, req);
2106+
ret = sec_skcipher_param_check(ctx, req, &need_fallback);
21152107
if (unlikely(ret))
21162108
return -EINVAL;
21172109

2118-
if (unlikely(ctx->c_ctx.fallback))
2110+
if (unlikely(ctx->c_ctx.fallback || need_fallback))
21192111
return sec_skcipher_soft_crypto(ctx, sk_req, encrypt);
21202112

21212113
return ctx->req_op->process(ctx, req);
@@ -2223,52 +2215,35 @@ static int sec_aead_spec_check(struct sec_ctx *ctx, struct sec_req *sreq)
22232215
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
22242216
size_t sz = crypto_aead_authsize(tfm);
22252217
u8 c_mode = ctx->c_ctx.c_mode;
2226-
struct device *dev = ctx->dev;
22272218
int ret;
22282219

2229-
/* Hardware does not handle cases where authsize is not 4 bytes aligned */
2230-
if (c_mode == SEC_CMODE_CBC && (sz & WORD_MASK)) {
2231-
sreq->aead_req.fallback = true;
2220+
if (unlikely(ctx->sec->qm.ver == QM_HW_V2 && !sreq->c_req.c_len))
22322221
return -EINVAL;
2233-
}
22342222

22352223
if (unlikely(req->cryptlen + req->assoclen > MAX_INPUT_DATA_LEN ||
2236-
req->assoclen > SEC_MAX_AAD_LEN)) {
2237-
dev_err(dev, "aead input spec error!\n");
2224+
req->assoclen > SEC_MAX_AAD_LEN))
22382225
return -EINVAL;
2239-
}
22402226

22412227
if (c_mode == SEC_CMODE_CCM) {
2242-
if (unlikely(req->assoclen > SEC_MAX_CCM_AAD_LEN)) {
2243-
dev_err_ratelimited(dev, "CCM input aad parameter is too long!\n");
2228+
if (unlikely(req->assoclen > SEC_MAX_CCM_AAD_LEN))
22442229
return -EINVAL;
2245-
}
2246-
ret = aead_iv_demension_check(req);
2247-
if (ret) {
2248-
dev_err(dev, "aead input iv param error!\n");
2249-
return ret;
2250-
}
2251-
}
22522230

2253-
if (sreq->c_req.encrypt)
2254-
sreq->c_req.c_len = req->cryptlen;
2255-
else
2256-
sreq->c_req.c_len = req->cryptlen - sz;
2257-
if (c_mode == SEC_CMODE_CBC) {
2258-
if (unlikely(sreq->c_req.c_len & (AES_BLOCK_SIZE - 1))) {
2259-
dev_err(dev, "aead crypto length error!\n");
2231+
ret = aead_iv_demension_check(req);
2232+
if (unlikely(ret))
2233+
return -EINVAL;
2234+
} else if (c_mode == SEC_CMODE_CBC) {
2235+
if (unlikely(sz & WORD_MASK))
2236+
return -EINVAL;
2237+
if (unlikely(ctx->a_ctx.a_key_len & WORD_MASK))
22602238
return -EINVAL;
2261-
}
22622239
}
22632240

22642241
return 0;
22652242
}
22662243

2267-
static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
2244+
static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq, bool *need_fallback)
22682245
{
22692246
struct aead_request *req = sreq->aead_req.aead_req;
2270-
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2271-
size_t authsize = crypto_aead_authsize(tfm);
22722247
struct device *dev = ctx->dev;
22732248
u8 c_alg = ctx->c_ctx.c_alg;
22742249

@@ -2277,12 +2252,10 @@ static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
22772252
return -EINVAL;
22782253
}
22792254

2280-
if (ctx->sec->qm.ver == QM_HW_V2) {
2281-
if (unlikely(!req->cryptlen || (!sreq->c_req.encrypt &&
2282-
req->cryptlen <= authsize))) {
2283-
sreq->aead_req.fallback = true;
2284-
return -EINVAL;
2285-
}
2255+
if (unlikely(ctx->c_ctx.c_mode == SEC_CMODE_CBC &&
2256+
sreq->c_req.c_len & (AES_BLOCK_SIZE - 1))) {
2257+
dev_err(dev, "aead cbc mode input data length error!\n");
2258+
return -EINVAL;
22862259
}
22872260

22882261
/* Support AES or SM4 */
@@ -2291,8 +2264,10 @@ static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
22912264
return -EINVAL;
22922265
}
22932266

2294-
if (unlikely(sec_aead_spec_check(ctx, sreq)))
2267+
if (unlikely(sec_aead_spec_check(ctx, sreq))) {
2268+
*need_fallback = true;
22952269
return -EINVAL;
2270+
}
22962271

22972272
if (ctx->pbuf_supported && (req->cryptlen + req->assoclen) <=
22982273
SEC_PBUF_SZ)
@@ -2336,17 +2311,19 @@ static int sec_aead_crypto(struct aead_request *a_req, bool encrypt)
23362311
struct crypto_aead *tfm = crypto_aead_reqtfm(a_req);
23372312
struct sec_req *req = aead_request_ctx(a_req);
23382313
struct sec_ctx *ctx = crypto_aead_ctx(tfm);
2314+
size_t sz = crypto_aead_authsize(tfm);
2315+
bool need_fallback = false;
23392316
int ret;
23402317

23412318
req->flag = a_req->base.flags;
23422319
req->aead_req.aead_req = a_req;
23432320
req->c_req.encrypt = encrypt;
23442321
req->ctx = ctx;
2345-
req->aead_req.fallback = false;
2322+
req->c_req.c_len = a_req->cryptlen - (req->c_req.encrypt ? 0 : sz);
23462323

2347-
ret = sec_aead_param_check(ctx, req);
2324+
ret = sec_aead_param_check(ctx, req, &need_fallback);
23482325
if (unlikely(ret)) {
2349-
if (req->aead_req.fallback)
2326+
if (need_fallback)
23502327
return sec_aead_soft_crypto(ctx, a_req, encrypt);
23512328
return -EINVAL;
23522329
}

0 commit comments

Comments
 (0)