@@ -57,8 +57,8 @@ struct safexcel_cipher_ctx {
 	/* All the below is AEAD specific */
 	u32 hash_alg;
 	u32 state_sz;
-	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
-	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+	__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
+	__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
 
 	struct crypto_cipher *hkaes;
 	struct crypto_aead *fback;
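The switch to `__be32` matters only for static analysis: it is a kernel `__bitwise` type that compiles to a plain `u32`, but under the sparse checker (`make C=1`) it becomes a distinct type, so storing an unconverted CPU-order word into `ipad`/`opad` gets flagged. A minimal userspace sketch of the mechanism, with `htonl()` standing in for the byte swap; the shim names below are illustrative, not driver code:

/*
 * Userspace sketch of the kernel's __bitwise endian annotations; the
 * real definitions live in <linux/types.h> and <asm/byteorder.h>.
 * Under sparse (which defines __CHECKER__), be32 becomes a distinct
 * type and mixing it with plain integers is reported; for the normal
 * compiler everything below is a no-op.
 */
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>	/* htonl(): userspace stand-in for the swap */

#ifdef __CHECKER__
#define __bitwise __attribute__((bitwise))
#define __force   __attribute__((force))
#else
#define __bitwise
#define __force
#endif

typedef uint32_t __bitwise be32;

/* mirrors the kernel helper: swap to big endian, annotate the result */
static inline be32 cpu_to_be32(uint32_t x)
{
	return (__force be32)htonl(x);
}

int main(void)
{
	be32 ipad[4];

	ipad[0] = cpu_to_be32(1);	/* ok: both sides are be32 */
	/* ipad[1] = 1;			   sparse would warn here */
	printf("stored word read back as 0x%08x\n",
	       (__force uint32_t)ipad[0]);
	return 0;
}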
@@ -92,7 +92,8 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 			cdesc->control_data.token[3] = 0;
 		} else {
 			/* 32 bit counter, start at 1 (big endian!) */
-			cdesc->control_data.token[3] = cpu_to_be32(1);
+			cdesc->control_data.token[3] =
+				(__force u32)cpu_to_be32(1);
 		}
 
 		return;
@@ -108,7 +109,8 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 			cdesc->control_data.token[3] = 0;
 		} else {
 			/* 32 bit counter, start at 1 (big endian!) */
-			cdesc->control_data.token[3] = cpu_to_be32(1);
+			*(__be32 *)&cdesc->control_data.token[3] =
+				cpu_to_be32(1);
 		}
 
 		return;
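These two hunks silence the same warning in two different ways: the first force-casts the big-endian value back to plain `u32`, the second retypes the destination as `__be32 *` instead. Both compile to an identical store; the choice only decides which side of the assignment carries the annotation override. A standalone sketch using the same userspace shims as above (since the `htonl()` stand-in returns a plain integer, variant 2 needs a `__force` the kernel's `cpu_to_be32()` would not):

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>

#ifdef __CHECKER__
#define __bitwise __attribute__((bitwise))
#define __force   __attribute__((force))
#else
#define __bitwise
#define __force
#endif

typedef uint32_t __bitwise be32;

int main(void)
{
	uint32_t token[4] = { 0 };

	/* variant 1: force-cast the big-endian value to plain u32 */
	token[3] = (__force uint32_t)htonl(1);

	/* variant 2: retype the destination as big-endian instead */
	*(be32 *)&token[3] = (__force be32)htonl(1);

	/* either way the memory holds 00 00 00 01 */
	return memcmp(&token[3], "\0\0\0\1", 4);
}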
@@ -267,7 +269,7 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 	if (ctx->xcm != EIP197_XCM_MODE_GCM) {
 		u8 *final_iv = (u8 *)cdesc->control_data.token;
 		u8 *cbcmaciv = (u8 *)&token[1];
-		u32 *aadlen = (u32 *)&token[5];
+		__le32 *aadlen = (__le32 *)&token[5];
 
 		/* Construct IV block B0 for the CBC-MAC */
 		token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
@@ -286,7 +288,8 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 		cbcmaciv[15] = cryptlen & 255;
 
 		if (assoclen) {
-			*aadlen = cpu_to_le32(cpu_to_be16(assoclen));
+			*aadlen = cpu_to_le32((assoclen >> 8) |
+					      ((assoclen & 0xff) << 8));
 			assoclen += 2;
 		}
 
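The old `cpu_to_le32(cpu_to_be16(assoclen))` nested two conversions of different widths, which is both a sparse type clash and hard to read. What the CCM B-block needs is the AAD length as a 16-bit big-endian field at the front of a little-endian token word, so the patch open-codes the byte swap in plain arithmetic before the single `cpu_to_le32()`. A standalone check of that layout (not driver code):

/*
 * Swapping the two bytes in CPU arithmetic and then storing the word
 * little-endian (as cpu_to_le32() guarantees) puts the high byte
 * first, which is exactly the big-endian 16-bit layout CCM wants.
 */
#include <assert.h>
#include <stdint.h>

int main(void)
{
	for (uint32_t assoclen = 1; assoclen < 0x10000; assoclen++) {
		/* the expression from the patch */
		uint32_t v = (assoclen >> 8) | ((assoclen & 0xff) << 8);
		uint8_t buf[4];

		/* store v little-endian */
		buf[0] = v & 0xff;
		buf[1] = (v >> 8) & 0xff;
		buf[2] = (v >> 16) & 0xff;
		buf[3] = (v >> 24) & 0xff;

		/* the first two bytes read as big-endian assoclen */
		assert(buf[0] == (assoclen >> 8) &&
		       buf[1] == (assoclen & 0xff));
	}
	return 0;
}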
@@ -333,7 +336,7 @@ static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < len / sizeof(u32); i++) {
-			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
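Flipping which side of the comparison gets converted (a pattern repeated in several hunks below) is behaviour-neutral, because byte swaps are bijective: `le32_to_cpu(a) != b` holds exactly when `a != cpu_to_le32(b)`. Converting the annotated `ctx->key[i]` simply leaves both operands as plain `u32` in sparse's eyes. A tiny standalone check, with `swap32()` standing in for the byte swap:

#include <assert.h>
#include <stdint.h>

static uint32_t swap32(uint32_t x)	/* stand-in for the LE<->CPU swap */
{
	return x << 24 | (x & 0xff00) << 8 | ((x >> 8) & 0xff00) | x >> 24;
}

int main(void)
{
	uint32_t a = 0xdeadbeef, b = 0xefbeadde;

	/* swapping either side gives the same equality result */
	assert((swap32(a) != b) == (a != swap32(b)));
	assert(swap32(a) == b && a == swap32(b));
	return 0;
}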
@@ -358,7 +361,7 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct crypto_authenc_keys keys;
 	struct crypto_aes_ctx aes;
-	int err = -EINVAL;
+	int err = -EINVAL, i;
 
 	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
 		goto badkey;
@@ -400,9 +403,14 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 		goto badkey;
 	}
 
-	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
-	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
-		ctx->base.needs_inv = true;
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
+			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
+				ctx->base.needs_inv = true;
+				break;
+			}
+		}
+	}
 
 	/* Auth key */
 	switch (ctx->hash_alg) {
@@ -450,7 +458,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 		ctx->base.needs_inv = true;
 
 	/* Now copy the keys into the context */
-	memcpy(ctx->key, keys.enckey, keys.enckeylen);
+	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
+		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
 	ctx->key_len = keys.enckeylen;
 
 	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
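Storing `cpu_to_le32(aes.key_enc[i])` instead of `memcpy()`ing `keys.enckey` preserves the key bytes exactly, because the kernel's `aes_expandkey()` fills the first words of `key_enc` with `get_unaligned_le32()` of the raw key, so converting those words back to little endian round-trips the byte string. A userspace sketch of that round trip, using stand-in helpers and mimicking only the first step of key expansion:

#include <assert.h>
#include <stdint.h>
#include <string.h>

static uint32_t get_unaligned_le32(const uint8_t *p)
{
	return p[0] | p[1] << 8 |
	       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

static void put_unaligned_le32(uint32_t v, uint8_t *p)
{
	p[0] = v; p[1] = v >> 8; p[2] = v >> 16; p[3] = v >> 24;
}

int main(void)
{
	const uint8_t key[16] = {
		0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
	};
	uint32_t key_enc[4];	/* first 4 words of the key schedule */
	uint8_t round_trip[16];
	int i;

	for (i = 0; i < 4; i++)
		key_enc[i] = get_unaligned_le32(key + 4 * i);
	for (i = 0; i < 4; i++)
		put_unaligned_le32(key_enc[i], round_trip + 4 * i);

	/* the LE parse followed by an LE store is byte-for-byte lossless */
	assert(!memcmp(key, round_trip, sizeof(key)));
	return 0;
}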
@@ -1378,7 +1387,7 @@ static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < keylen / sizeof(u32); i++) {
-			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
@@ -1534,13 +1543,11 @@ static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
 		return err;
 
 	/* if context exists and key changed, need to invalidate it */
-	if (ctx->base.ctxr_dma) {
+	if (ctx->base.ctxr_dma)
 		if (memcmp(ctx->key, key, len))
 			ctx->base.needs_inv = true;
-	}
 
 	memcpy(ctx->key, key, len);
-
 	ctx->key_len = len;
 
 	return 0;
@@ -2361,7 +2368,7 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < keylen / sizeof(u32); i++) {
-			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
@@ -2380,8 +2387,8 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < keylen / sizeof(u32); i++) {
-			if (ctx->key[i + keylen / sizeof(u32)] !=
-			    cpu_to_le32(aes.key_enc[i])) {
+			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
+			    aes.key_enc[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
@@ -2471,7 +2478,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < len / sizeof(u32); i++) {
-			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
@@ -2498,7 +2505,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
-			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
+			if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
@@ -2588,7 +2595,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < len / sizeof(u32); i++) {
-			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 				ctx->base.needs_inv = true;
 				break;
 			}
@@ -2697,20 +2704,12 @@ static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
 				     const u8 *key)
 {
 	struct safexcel_crypto_priv *priv = ctx->priv;
-	int i;
 
-	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
-		for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++) {
-			if (ctx->key[i] !=
-			    get_unaligned_le32(key + i * sizeof(u32))) {
-				ctx->base.needs_inv = true;
-				break;
-			}
-		}
-	}
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
+		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
+			ctx->base.needs_inv = true;
 
-	for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
-		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
+	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
 	ctx->key_len = CHACHA_KEY_SIZE;
 }
 
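ChaCha20 keys are specified as a byte stream, and, assuming the context record is handed to the engine as raw bytes, `memcpy()`/`memcmp()` are not just simpler than the old per-word `get_unaligned_le32()` round trip: on a big-endian host the old code would have byte-swapped the key, while the raw copy is correct everywhere. A sketch of the difference (illustrative, not driver code):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define CHACHA_KEY_SIZE 32

static uint32_t get_unaligned_le32(const uint8_t *p)
{
	return p[0] | p[1] << 8 |
	       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

int main(void)
{
	uint8_t key[CHACHA_KEY_SIZE], words_mem[CHACHA_KEY_SIZE];
	uint32_t words[CHACHA_KEY_SIZE / 4];
	int i;

	for (i = 0; i < CHACHA_KEY_SIZE; i++)
		key[i] = (uint8_t)i;

	/* old scheme: parse LE words, then store them in CPU order */
	for (i = 0; i < CHACHA_KEY_SIZE / 4; i++)
		words[i] = get_unaligned_le32(key + 4 * i);
	memcpy(words_mem, words, sizeof(words));

	/* identical to the raw bytes only on little-endian hosts */
	printf("round trip %s the byte stream\n",
	       memcmp(words_mem, key, CHACHA_KEY_SIZE) ? "alters"
						       : "preserves");
	return 0;
}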
@@ -2801,7 +2800,7 @@ static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct aead_request *subreq = aead_request_ctx(req);
 	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
-	int i, ret = 0;
+	int ret = 0;
 
 	/*
 	 * Instead of wasting time detecting umpteen silly corner cases,
@@ -2815,8 +2814,7 @@ static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
 	}
 
 	/* HW cannot do full (AAD+payload) zero length, use fallback */
-	for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
-		key[i] = cpu_to_le32(ctx->key[i]);
+	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
 		/* ESP variant has nonce appended to the key */
 		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
@@ -2971,25 +2969,17 @@ static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
-	int i;
 
 	if (len != SM4_KEY_SIZE) {
 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
 
-	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
-		for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++) {
-			if (ctx->key[i] !=
-			    get_unaligned_le32(key + i * sizeof(u32))) {
-				ctx->base.needs_inv = true;
-				break;
-			}
-		}
-	}
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
+		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
+			ctx->base.needs_inv = true;
 
-	for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++)
-		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
+	memcpy(ctx->key, key, SM4_KEY_SIZE);
 	ctx->key_len = SM4_KEY_SIZE;
 
 	return 0;