From 8c97ce8f10b15990f6b07e8fd74c04e601eda866 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 20 Dec 2024 05:19:31 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 transformer_engine/pytorch/attention.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/transformer_engine/pytorch/attention.py b/transformer_engine/pytorch/attention.py
index 5c2618b559..9268b9636e 100644
--- a/transformer_engine/pytorch/attention.py
+++ b/transformer_engine/pytorch/attention.py
@@ -604,7 +604,8 @@ def get_attention_backend(
             use_fused_attention = False
         elif cudnn_version >= (9, 6, 0) and qkv_format == "thd":
             logger.debug(
-                "Disabling FusedAttention as it does not support context parallelism with THD for cuDNN 9.6+"
+                "Disabling FusedAttention as it does not support context parallelism with THD for"
+                " cuDNN 9.6+"
             )
             use_fused_attention = False
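
The auto-fix above relies on Python's implicit concatenation of adjacent string literals: the split message compiles to the exact same string as the original one-liner, so only the source layout changes to satisfy the line-length hook. A minimal standalone sketch of that behavior (the logger setup here is illustrative, not taken from the patch):

```python
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# One long literal, as it appeared before the hook ran.
single = (
    "Disabling FusedAttention as it does not support context parallelism with THD for cuDNN 9.6+"
)

# Two adjacent literals, as rewritten by the hook; Python joins them at
# compile time, so no "+" or runtime concatenation is involved.
split = (
    "Disabling FusedAttention as it does not support context parallelism with THD for"
    " cuDNN 9.6+"
)

assert single == split  # identical strings; only the source formatting differs
logger.debug(split)
```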