Skip to content

Commit 77b888f

Browse files
committed
Formatting fix for enabling SDP with BF16 precision on HPU
Signed-off-by: Daniel Socek <[email protected]>
1 parent 8e73b58 commit 77b888f

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

src/diffusers/pipelines/pipeline_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -507,7 +507,7 @@ def module_is_offloaded(module):
507507
if dtype in (torch.bfloat16, None) and kwargs.pop("sdp_on_bf16", True):
508508
if hasattr(torch._C, "_set_math_sdp_allow_fp16_bf16_reduction"):
509509
torch._C._set_math_sdp_allow_fp16_bf16_reduction(True)
510-
logger.warning(f"Enabled SDP with BF16 precision on HPU. To disable, please use `.to('hpu', sdp_on_bf16=False)`")
510+
logger.warning("Enabled SDP with BF16 precision on HPU. To disable, please use `.to('hpu', sdp_on_bf16=False)`")
511511

512512
module_names, _ = self._get_signature_keys(self)
513513
modules = [getattr(self, n, None) for n in module_names]

0 commit comments

Comments (0)