Skip to content

Commit 682e6bb

Browse files
committed
Skip GPU integration tests for TE
1 parent 20b297f commit 682e6bb

1 file changed

Lines changed: 3 additions & 0 deletions

File tree

tests/integration/train_tests.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -339,6 +339,7 @@ def test_gpu_cudnn_flash_te(self):
339339

340340
@pytest.mark.integration_test
341341
@pytest.mark.gpu_only
342+
@pytest.mark.skip(reason="b/489133823. Previously transient in b/462548581.")
342343
def test_gpu_context_parallelism(self):
343344
os.environ["NVTE_FUSED_ATTN"] = "1" # Enable fused attention
344345
context_parallel = [ # tests base config on GPU with All-Gather based context parallelism
@@ -376,6 +377,7 @@ def test_gpu_context_parallelism(self):
376377

377378
@pytest.mark.integration_test
378379
@pytest.mark.gpu_only
380+
@pytest.mark.skip(reason="b/489133823. Previously transient in b/462548581.")
379381
def test_gpu_tensor_parallelism(self):
380382
os.environ["NVTE_FUSED_ATTN"] = "1" # Enable fused attention
381383
tensor_parallel = [ # tests base config on GPU with Tensor Parallelism
@@ -562,6 +564,7 @@ def test_gpu_packed_attention(self):
562564

563565
@pytest.mark.integration_test
564566
@pytest.mark.gpu_only
567+
@pytest.mark.skip(reason="b/489133823. Previously transient in b/462548581.")
565568
def test_gpu_ring_attention(self):
566569
os.environ["NVTE_FUSED_ATTN"] = "1" # Enable fused attention
567570
os.environ["NVTE_FUSED_RING_ATTENTION_USE_SCAN"] = "0" # Disable scan for ring attention

0 commit comments

Comments (0)