Move distributed encoder tests to L0 distributed test suite
Signed-off-by: Phuong Nguyen <[email protected]>
Co-authored-by: Reese Wang <[email protected]>
phu0ngng and zlsh80826 committed Dec 18, 2024
1 parent 518d071 commit 84895fe
Showing 2 changed files with 17 additions and 2 deletions.
16 changes: 16 additions & 0 deletions qa/L0_jax_distributed_unittest/test.sh
@@ -0,0 +1,16 @@
# Copyright (c) 2022-2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# See LICENSE for license information.

set -xe

: ${TE_PATH:=/opt/transformerengine}

pip install -r $TE_PATH/examples/jax/encoder/requirements.txt

# Make encoder tests run-to-run deterministic for stable CI results
export XLA_FLAGS="${XLA_FLAGS} --xla_gpu_deterministic_ops"
pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder/test_multigpu_encoder.py
pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder/test_model_parallel_encoder.py
pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder/test_multiprocessing_encoder.py

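For reference, a hedged sketch of how this new L0 distributed suite could be invoked locally. The /opt/transformerengine default comes from the script itself; the explicit export and bash invocation below are assumptions about a local setup, not part of this commit.

    # Assumed local invocation; the distributed encoder tests need a multi-GPU machine.
    export TE_PATH=/opt/transformerengine   # override if Transformer Engine lives elsewhere
    bash $TE_PATH/qa/L0_jax_distributed_unittest/test.sh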
3 changes: 1 addition & 2 deletions qa/L0_jax_unittest/test.sh
@@ -20,5 +20,4 @@ pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/mnist

# Make encoder tests run-to-run deterministic for stable CI results
export XLA_FLAGS="${XLA_FLAGS} --xla_gpu_deterministic_ops"
-pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder --ignore=$TE_PATH/examples/jax/encoder/test_multiprocessing_encoder.py
-pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder/test_multiprocessing_encoder.py
+pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder/test_single_gpu_encoder.py

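After this change, L0_jax_unittest exercises only the single-GPU encoder example; the multi-GPU, model-parallel, and multiprocessing encoder tests now live in the new L0_jax_distributed_unittest suite. A hedged sketch of running the remaining single-GPU test on its own, mirroring the script's pytest invocation and deterministic XLA flag (the standalone run itself is an assumption, not part of the commit):

    # Assumed standalone run of the remaining single-GPU encoder test.
    export XLA_FLAGS="${XLA_FLAGS} --xla_gpu_deterministic_ops"
    pytest -c $TE_PATH/tests/jax/pytest.ini -v $TE_PATH/examples/jax/encoder/test_single_gpu_encoder.py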