Skip to content

Commit 13a1bb9

Browse files
Pascal van Leeuwen authored and
herbertx committed
crypto: inside-secure - Fixed warnings on inconsistent byte order handling
This fixes a bunch of endianness related sparse warnings reported by the
kbuild test robot as well as Ben Dooks. Credits for the fix to safexcel.c
go to Ben Dooks.

Reported-by: kbuild test robot <[email protected]>
Reported-by: Ben Dooks <[email protected]>
Signed-off-by: Pascal van Leeuwen <[email protected]>
Signed-off-by: Herbert Xu <[email protected]>
1 parent 9b53799 commit 13a1bb9

File tree

4 files changed

+61
-67
lines changed

4 files changed

+61
-67
lines changed

drivers/crypto/inside-secure/safexcel.c

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -311,13 +311,14 @@ static void eip197_init_firmware(struct safexcel_crypto_priv *priv)
311311
static int eip197_write_firmware(struct safexcel_crypto_priv *priv,
312312
const struct firmware *fw)
313313
{
314-
const u32 *data = (const u32 *)fw->data;
314+
const __be32 *data = (const __be32 *)fw->data;
315315
int i;
316316

317317
/* Write the firmware */
318318
for (i = 0; i < fw->size / sizeof(u32); i++)
319319
writel(be32_to_cpu(data[i]),
320-
priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32));
320+
priv->base + EIP197_CLASSIFICATION_RAMS +
321+
i * sizeof(__be32));
321322

322323
/* Exclude final 2 NOPs from size */
323324
return i - EIP197_FW_TERMINAL_NOPS;

drivers/crypto/inside-secure/safexcel.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -360,8 +360,8 @@
360360

361361
/* Context Control */
362362
struct safexcel_context_record {
363-
u32 control0;
364-
u32 control1;
363+
__le32 control0;
364+
__le32 control1;
365365

366366
__le32 data[40];
367367
} __packed;

drivers/crypto/inside-secure/safexcel_cipher.c

Lines changed: 39 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -57,8 +57,8 @@ struct safexcel_cipher_ctx {
5757
/* All the below is AEAD specific */
5858
u32 hash_alg;
5959
u32 state_sz;
60-
u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
61-
u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
60+
__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
61+
__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
6262

6363
struct crypto_cipher *hkaes;
6464
struct crypto_aead *fback;
@@ -92,7 +92,8 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
9292
cdesc->control_data.token[3] = 0;
9393
} else {
9494
/* 32 bit counter, start at 1 (big endian!) */
95-
cdesc->control_data.token[3] = cpu_to_be32(1);
95+
cdesc->control_data.token[3] =
96+
(__force u32)cpu_to_be32(1);
9697
}
9798

9899
return;
@@ -108,7 +109,8 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
108109
cdesc->control_data.token[3] = 0;
109110
} else {
110111
/* 32 bit counter, start at 1 (big endian!) */
111-
cdesc->control_data.token[3] = cpu_to_be32(1);
112+
*(__be32 *)&cdesc->control_data.token[3] =
113+
cpu_to_be32(1);
112114
}
113115

114116
return;
@@ -267,7 +269,7 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
267269
if (ctx->xcm != EIP197_XCM_MODE_GCM) {
268270
u8 *final_iv = (u8 *)cdesc->control_data.token;
269271
u8 *cbcmaciv = (u8 *)&token[1];
270-
u32 *aadlen = (u32 *)&token[5];
272+
__le32 *aadlen = (__le32 *)&token[5];
271273

272274
/* Construct IV block B0 for the CBC-MAC */
273275
token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
@@ -286,7 +288,8 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
286288
cbcmaciv[15] = cryptlen & 255;
287289

288290
if (assoclen) {
289-
*aadlen = cpu_to_le32(cpu_to_be16(assoclen));
291+
*aadlen = cpu_to_le32((assoclen >> 8) |
292+
((assoclen & 0xff) << 8));
290293
assoclen += 2;
291294
}
292295

@@ -333,7 +336,7 @@ static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
333336

334337
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
335338
for (i = 0; i < len / sizeof(u32); i++) {
336-
if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
339+
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
337340
ctx->base.needs_inv = true;
338341
break;
339342
}
@@ -358,7 +361,7 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
358361
struct safexcel_crypto_priv *priv = ctx->priv;
359362
struct crypto_authenc_keys keys;
360363
struct crypto_aes_ctx aes;
361-
int err = -EINVAL;
364+
int err = -EINVAL, i;
362365

363366
if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
364367
goto badkey;
@@ -400,9 +403,14 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
400403
goto badkey;
401404
}
402405

403-
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
404-
memcmp(ctx->key, keys.enckey, keys.enckeylen))
405-
ctx->base.needs_inv = true;
406+
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
407+
for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
408+
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
409+
ctx->base.needs_inv = true;
410+
break;
411+
}
412+
}
413+
}
406414

407415
/* Auth key */
408416
switch (ctx->hash_alg) {
@@ -450,7 +458,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
450458
ctx->base.needs_inv = true;
451459

452460
/* Now copy the keys into the context */
453-
memcpy(ctx->key, keys.enckey, keys.enckeylen);
461+
for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
462+
ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
454463
ctx->key_len = keys.enckeylen;
455464

456465
memcpy(ctx->ipad, &istate.state, ctx->state_sz);
@@ -1378,7 +1387,7 @@ static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
13781387

13791388
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
13801389
for (i = 0; i < keylen / sizeof(u32); i++) {
1381-
if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
1390+
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
13821391
ctx->base.needs_inv = true;
13831392
break;
13841393
}
@@ -1534,13 +1543,11 @@ static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
15341543
return err;
15351544

15361545
/* if context exits and key changed, need to invalidate it */
1537-
if (ctx->base.ctxr_dma) {
1546+
if (ctx->base.ctxr_dma)
15381547
if (memcmp(ctx->key, key, len))
15391548
ctx->base.needs_inv = true;
1540-
}
15411549

15421550
memcpy(ctx->key, key, len);
1543-
15441551
ctx->key_len = len;
15451552

15461553
return 0;
@@ -2361,7 +2368,7 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
23612368

23622369
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
23632370
for (i = 0; i < keylen / sizeof(u32); i++) {
2364-
if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2371+
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
23652372
ctx->base.needs_inv = true;
23662373
break;
23672374
}
@@ -2380,8 +2387,8 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
23802387

23812388
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
23822389
for (i = 0; i < keylen / sizeof(u32); i++) {
2383-
if (ctx->key[i + keylen / sizeof(u32)] !=
2384-
cpu_to_le32(aes.key_enc[i])) {
2390+
if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2391+
aes.key_enc[i]) {
23852392
ctx->base.needs_inv = true;
23862393
break;
23872394
}
@@ -2471,7 +2478,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
24712478

24722479
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
24732480
for (i = 0; i < len / sizeof(u32); i++) {
2474-
if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2481+
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
24752482
ctx->base.needs_inv = true;
24762483
break;
24772484
}
@@ -2498,7 +2505,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
24982505

24992506
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
25002507
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2501-
if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
2508+
if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
25022509
ctx->base.needs_inv = true;
25032510
break;
25042511
}
@@ -2588,7 +2595,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
25882595

25892596
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
25902597
for (i = 0; i < len / sizeof(u32); i++) {
2591-
if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2598+
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
25922599
ctx->base.needs_inv = true;
25932600
break;
25942601
}
@@ -2697,20 +2704,12 @@ static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
26972704
const u8 *key)
26982705
{
26992706
struct safexcel_crypto_priv *priv = ctx->priv;
2700-
int i;
27012707

2702-
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2703-
for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++) {
2704-
if (ctx->key[i] !=
2705-
get_unaligned_le32(key + i * sizeof(u32))) {
2706-
ctx->base.needs_inv = true;
2707-
break;
2708-
}
2709-
}
2710-
}
2708+
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2709+
if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2710+
ctx->base.needs_inv = true;
27112711

2712-
for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
2713-
ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
2712+
memcpy(ctx->key, key, CHACHA_KEY_SIZE);
27142713
ctx->key_len = CHACHA_KEY_SIZE;
27152714
}
27162715

@@ -2801,7 +2800,7 @@ static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
28012800
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
28022801
struct aead_request *subreq = aead_request_ctx(req);
28032802
u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2804-
int i, ret = 0;
2803+
int ret = 0;
28052804

28062805
/*
28072806
* Instead of wasting time detecting umpteen silly corner cases,
@@ -2815,8 +2814,7 @@ static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
28152814
}
28162815

28172816
/* HW cannot do full (AAD+payload) zero length, use fallback */
2818-
for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
2819-
key[i] = cpu_to_le32(ctx->key[i]);
2817+
memcpy(key, ctx->key, CHACHA_KEY_SIZE);
28202818
if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
28212819
/* ESP variant has nonce appended to the key */
28222820
key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
@@ -2971,25 +2969,17 @@ static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
29712969
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
29722970
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
29732971
struct safexcel_crypto_priv *priv = ctx->priv;
2974-
int i;
29752972

29762973
if (len != SM4_KEY_SIZE) {
29772974
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
29782975
return -EINVAL;
29792976
}
29802977

2981-
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2982-
for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++) {
2983-
if (ctx->key[i] !=
2984-
get_unaligned_le32(key + i * sizeof(u32))) {
2985-
ctx->base.needs_inv = true;
2986-
break;
2987-
}
2988-
}
2989-
}
2978+
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2979+
if (memcmp(ctx->key, key, SM4_KEY_SIZE))
2980+
ctx->base.needs_inv = true;
29902981

2991-
for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++)
2992-
ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
2982+
memcpy(ctx->key, key, SM4_KEY_SIZE);
29932983
ctx->key_len = SM4_KEY_SIZE;
29942984

29952985
return 0;

drivers/crypto/inside-secure/safexcel_hash.c

Lines changed: 17 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,8 @@ struct safexcel_ahash_ctx {
2929
bool fb_init_done;
3030
bool fb_do_setkey;
3131

32-
u32 ipad[SHA3_512_BLOCK_SIZE / sizeof(u32)];
33-
u32 opad[SHA3_512_BLOCK_SIZE / sizeof(u32)];
32+
__le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
33+
__le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
3434

3535
struct crypto_cipher *kaes;
3636
struct crypto_ahash *fback;
@@ -56,7 +56,8 @@ struct safexcel_ahash_req {
5656
u8 state_sz; /* expected state size, only set once */
5757
u8 block_sz; /* block size, only set once */
5858
u8 digest_sz; /* output digest size, only set once */
59-
u32 state[SHA3_512_BLOCK_SIZE / sizeof(u32)] __aligned(sizeof(u32));
59+
__le32 state[SHA3_512_BLOCK_SIZE /
60+
sizeof(__le32)] __aligned(sizeof(__le32));
6061

6162
u64 len;
6263
u64 processed;
@@ -287,7 +288,7 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
287288
if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
288289
ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
289290
/* Undo final XOR with 0xffffffff ...*/
290-
*(u32 *)areq->result = ~sreq->state[0];
291+
*(__le32 *)areq->result = ~sreq->state[0];
291292
} else {
292293
memcpy(areq->result, sreq->state,
293294
crypto_ahash_digestsize(ahash));
@@ -372,9 +373,9 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
372373
req->cache[cache_len + skip] = 0x80;
373374
// HW will use K2 iso K3 - compensate!
374375
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
375-
((u32 *)req->cache)[i] ^=
376-
cpu_to_be32(ctx->ipad[i]) ^
377-
cpu_to_be32(ctx->ipad[i + 4]);
376+
((__be32 *)req->cache)[i] ^=
377+
cpu_to_be32(le32_to_cpu(
378+
ctx->ipad[i] ^ ctx->ipad[i + 4]));
378379
}
379380
cache_len = AES_BLOCK_SIZE;
380381
queued = queued + extra;
@@ -807,8 +808,8 @@ static int safexcel_ahash_final(struct ahash_request *areq)
807808
int i;
808809

809810
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
810-
((u32 *)areq->result)[i] =
811-
cpu_to_be32(ctx->ipad[i + 4]); // K3
811+
((__be32 *)areq->result)[i] =
812+
cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
812813
areq->result[0] ^= 0x80; // 10- padding
813814
crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
814815
return 0;
@@ -1891,7 +1892,7 @@ static int safexcel_crc32_init(struct ahash_request *areq)
18911892
memset(req, 0, sizeof(*req));
18921893

18931894
/* Start from loaded key */
1894-
req->state[0] = cpu_to_le32(~ctx->ipad[0]);
1895+
req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
18951896
/* Set processed to non-zero to enable invalidation detection */
18961897
req->len = sizeof(u32);
18971898
req->processed = sizeof(u32);
@@ -1993,7 +1994,7 @@ static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
19931994

19941995
memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
19951996
for (i = 0; i < len / sizeof(u32); i++)
1996-
ctx->ipad[i + 8] = cpu_to_be32(aes.key_enc[i]);
1997+
ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
19971998

19981999
if (len == AES_KEYSIZE_192) {
19992000
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
@@ -2078,7 +2079,8 @@ static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
20782079
crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
20792080
"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
20802081
for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2081-
ctx->ipad[i] = cpu_to_be32(key_tmp[i]);
2082+
ctx->ipad[i] =
2083+
cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
20822084

20832085
crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
20842086
crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
@@ -2164,7 +2166,8 @@ static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
21642166
}
21652167

21662168
for (i = 0; i < len / sizeof(u32); i++)
2167-
ctx->ipad[i + 8] = cpu_to_be32(aes.key_enc[i]);
2169+
ctx->ipad[i + 8] =
2170+
cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
21682171

21692172
/* precompute the CMAC key material */
21702173
crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
@@ -2197,7 +2200,7 @@ static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
21972200
/* end of code borrowed from crypto/cmac.c */
21982201

21992202
for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2200-
ctx->ipad[i] = cpu_to_be32(((u32 *)consts)[i]);
2203+
ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
22012204

22022205
if (len == AES_KEYSIZE_192) {
22032206
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;

0 commit comments

Comments
 (0)