From caeb2537cfb2683ad48a900c24531f371c61c38e Mon Sep 17 00:00:00 2001
From: tocean
Date: Tue, 17 Oct 2023 10:43:56 +0000
Subject: [PATCH] skip TE unit test since fused attention only works when
 CUDA >= 12.1

---
 tests/te/test_replacer.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/tests/te/test_replacer.py b/tests/te/test_replacer.py
index 34639e94..a682a909 100644
--- a/tests/te/test_replacer.py
+++ b/tests/te/test_replacer.py
@@ -33,6 +33,9 @@ def tearDown(self):
     @decorator.cuda_test
     def test_replace(self):
         """Test replace function in TeReplacer."""
+        # fused attention needs CUDA >= 12.1; compare parsed version tuples, not strings
+        if tuple(int(v) for v in (torch.version.cuda or '0').split('.')[:2]) < (12, 1):
+            self.skipTest('fused attention requires CUDA >= 12.1')
         te_transformer = te.TransformerLayer(
             self.hidden_size, self.ffn_hidden_size, self.num_attention_heads, fuse_qkv_params=True
         )
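
Note on the guard above: a minimal standalone sketch of the same version
check, assuming only that torch is importable; the helper name
cuda_at_least is hypothetical and not part of the patched test.

    import torch

    def cuda_at_least(major, minor):
        """Return True if the detected CUDA runtime is at least major.minor."""
        version = torch.version.cuda  # e.g. '12.1'; None on CPU-only builds
        if version is None:
            return False
        # numeric tuple comparison avoids string-ordering bugs
        # (as strings, '9.2' < '12.1' is False)
        return tuple(int(v) for v in version.split('.')[:2]) >= (major, minor)

    # usage: skip work that needs fused attention on older toolkits
    if not cuda_at_least(12, 1):
        print('skipping: fused attention requires CUDA >= 12.1')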