@@ -168,30 +168,23 @@ static int ghash_async_init(struct ahash_request *req)
168168 struct ghash_async_ctx * ctx = crypto_ahash_ctx (tfm );
169169 struct ahash_request * cryptd_req = ahash_request_ctx (req );
170170 struct cryptd_ahash * cryptd_tfm = ctx -> cryptd_tfm ;
171+ struct shash_desc * desc = cryptd_shash_desc (cryptd_req );
172+ struct crypto_shash * child = cryptd_ahash_child (cryptd_tfm );
171173
172- if (!irq_fpu_usable ()) {
173- memcpy (cryptd_req , req , sizeof (* req ));
174- ahash_request_set_tfm (cryptd_req , & cryptd_tfm -> base );
175- return crypto_ahash_init (cryptd_req );
176- } else {
177- struct shash_desc * desc = cryptd_shash_desc (cryptd_req );
178- struct crypto_shash * child = cryptd_ahash_child (cryptd_tfm );
179-
180- desc -> tfm = child ;
181- desc -> flags = req -> base .flags ;
182- return crypto_shash_init (desc );
183- }
174+ desc -> tfm = child ;
175+ desc -> flags = req -> base .flags ;
176+ return crypto_shash_init (desc );
184177}
185178
186179static int ghash_async_update (struct ahash_request * req )
187180{
188181 struct ahash_request * cryptd_req = ahash_request_ctx (req );
182+ struct crypto_ahash * tfm = crypto_ahash_reqtfm (req );
183+ struct ghash_async_ctx * ctx = crypto_ahash_ctx (tfm );
184+ struct cryptd_ahash * cryptd_tfm = ctx -> cryptd_tfm ;
189185
190- if (!irq_fpu_usable ()) {
191- struct crypto_ahash * tfm = crypto_ahash_reqtfm (req );
192- struct ghash_async_ctx * ctx = crypto_ahash_ctx (tfm );
193- struct cryptd_ahash * cryptd_tfm = ctx -> cryptd_tfm ;
194-
186+ if (!irq_fpu_usable () ||
187+ (in_atomic () && cryptd_ahash_queued (cryptd_tfm ))) {
195188 memcpy (cryptd_req , req , sizeof (* req ));
196189 ahash_request_set_tfm (cryptd_req , & cryptd_tfm -> base );
197190 return crypto_ahash_update (cryptd_req );
@@ -204,12 +197,12 @@ static int ghash_async_update(struct ahash_request *req)
204197static int ghash_async_final (struct ahash_request * req )
205198{
206199 struct ahash_request * cryptd_req = ahash_request_ctx (req );
200+ struct crypto_ahash * tfm = crypto_ahash_reqtfm (req );
201+ struct ghash_async_ctx * ctx = crypto_ahash_ctx (tfm );
202+ struct cryptd_ahash * cryptd_tfm = ctx -> cryptd_tfm ;
207203
208- if (!irq_fpu_usable ()) {
209- struct crypto_ahash * tfm = crypto_ahash_reqtfm (req );
210- struct ghash_async_ctx * ctx = crypto_ahash_ctx (tfm );
211- struct cryptd_ahash * cryptd_tfm = ctx -> cryptd_tfm ;
212-
204+ if (!irq_fpu_usable () ||
205+ (in_atomic () && cryptd_ahash_queued (cryptd_tfm ))) {
213206 memcpy (cryptd_req , req , sizeof (* req ));
214207 ahash_request_set_tfm (cryptd_req , & cryptd_tfm -> base );
215208 return crypto_ahash_final (cryptd_req );
@@ -249,7 +242,8 @@ static int ghash_async_digest(struct ahash_request *req)
249242 struct ahash_request * cryptd_req = ahash_request_ctx (req );
250243 struct cryptd_ahash * cryptd_tfm = ctx -> cryptd_tfm ;
251244
252- if (!irq_fpu_usable ()) {
245+ if (!irq_fpu_usable () ||
246+ (in_atomic () && cryptd_ahash_queued (cryptd_tfm ))) {
253247 memcpy (cryptd_req , req , sizeof (* req ));
254248 ahash_request_set_tfm (cryptd_req , & cryptd_tfm -> base );
255249 return crypto_ahash_digest (cryptd_req );