Commit 701369a: try again

epwalsh committed Apr 2, 2024 (1 parent: 9fae410)
Showing 3 changed files with 15 additions and 3 deletions.
8 changes: 6 additions & 2 deletions .github/workflows/main.yml
@@ -35,10 +35,14 @@ jobs:
       run: make lint-check

     - name: Test core
-      run: pytest -v --color=yes --durations=5 -n=1 src/test/ --ignore-glob='src/test/distributed/fsdp/*'
+      run: |
+        pytest -v --color=yes --durations=3 src/test/ --ignore-glob='src/test/distributed/*'
+        pytest -v --color=yes --durations=3 src/test/distributed/ --ignore-glob='src/test/distributed/fsdp/*'

     - name: Test FSDP
-      run: pytest -v --color=yes --durations=5 -n=1 src/test/distributed/fsdp/
+      run: |
+        pytest -v --color=yes --durations=3 src/test/distributed/fsdp/ -m 'fsdp1'
+        pytest -v --color=yes --durations=3 -n=1 src/test/distributed/fsdp/ -m 'fsdp2'

     - name: Type check
       run: make type-check
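Note: the split above can be reproduced locally with the same pytest invocations. The sketch below is illustrative and not part of this commit; it assumes it is run from the repository root with pytest and pytest-xdist installed, and it mirrors the four commands in the workflow (only the fsdp2 group is pinned to a single xdist worker with -n=1), running each group in its own process the way CI does.

# run_ci_tests.py -- hypothetical local helper, not included in this commit.
# Runs the same four test groups as the "Test core" and "Test FSDP" CI steps,
# each in a separate pytest process, stopping at the first failing group.
import subprocess
import sys

COMMON = ["pytest", "-v", "--color=yes", "--durations=3"]
GROUPS = [
    ["src/test/", "--ignore-glob=src/test/distributed/*"],
    ["src/test/distributed/", "--ignore-glob=src/test/distributed/fsdp/*"],
    ["src/test/distributed/fsdp/", "-m", "fsdp1"],
    ["src/test/distributed/fsdp/", "-m", "fsdp2", "-n=1"],
]

for extra in GROUPS:
    result = subprocess.run(COMMON + extra)
    if result.returncode != 0:
        sys.exit(result.returncode)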
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -152,7 +152,8 @@ log_cli = false
 log_cli_level = "DEBUG"
 markers = [
     "gpu",
-    "fsdp",
+    "fsdp1",
+    "fsdp2",
 ]
 filterwarnings = [
     'ignore::FutureWarning:huggingface_hub\.file_download',
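For context, registering "fsdp1" and "fsdp2" here is what lets the workflow filter with -m 'fsdp1' / -m 'fsdp2' without pytest emitting unknown-marker warnings (or failing, if --strict-markers is enabled). A rough conftest.py equivalent is sketched below purely as an illustration; this repository keeps the registration in pyproject.toml, and the marker descriptions are assumptions based on how the CI workflow runs each group.

# conftest.py -- illustrative alternative, not code from this commit.
def pytest_configure(config):
    # Register the two FSDP marker groups at collection time.
    config.addinivalue_line("markers", "fsdp1: FSDP tests run with the default xdist worker count in CI")
    config.addinivalue_line("markers", "fsdp2: FSDP tests pinned to a single worker (-n=1) in CI")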
7 changes: 7 additions & 0 deletions src/test/distributed/fsdp/fsdp_test.py
@@ -85,6 +85,7 @@ def run_fsdp_against_non_distributed_model(model_factory, model_data_factory):
)


+@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_fsdp_against_non_distributed_model(backend, tiny_model_factory, tiny_model_data_factory):
run_distributed_test(
@@ -158,6 +159,7 @@ def run_fsdp_against_ddp(model_factory, model_data_factory):
optim.step()


+@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_fsdp_against_ddp(backend, tiny_model_factory, tiny_model_data_factory):
run_distributed_test(
@@ -215,6 +217,7 @@ def run_fsdp_with_gradient_accumulation(model_factory, model_data_factory):
)


+@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_fsdp_with_gradient_accumulation(backend, tiny_model_factory, tiny_model_data_factory):
run_distributed_test(
@@ -347,6 +350,7 @@ def forward(self, x):
loss.backward()


+@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_nested_fsdp_api(backend, tiny_model_factory, tiny_model_data_factory):
run_distributed_test(
@@ -383,6 +387,7 @@ def run_fsdp_with_mixed_precision(model_factory, model_data_factory, precision):
assert param.grad.dtype == param.dtype


+@pytest.mark.fsdp2
@pytest.mark.parametrize("backend", BACKENDS)
@pytest.mark.parametrize("precision", FSDP_MIXED_PRECISION)
def test_fsdp_with_mixed_precision(backend, tiny_model_factory, tiny_model_data_factory, precision):
@@ -431,6 +436,7 @@ def __init__(self):
assert fsdp.module.fc3.out_proj.max_prefetch_count == 3


+@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_auto_wrap(backend):
run_distributed_test(
@@ -491,6 +497,7 @@ def initialize_and_check(m: nn.Module):
assert (param.data.detach() == 1.1).all()


+@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_apply(backend):
run_distributed_test(
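Each FSDP test above now carries a group marker on top of its backend parametrization, which is what the new workflow filters on. The standalone example below is not taken from fsdp_test.py; BACKENDS is a stand-in for the list used in the real test module, and run_distributed_test plus the model factories are omitted because they belong to the repository's test utilities. It assumes the markers are registered as in pyproject.toml above.

# marker_demo.py -- standalone illustration of the marker-plus-parametrize pattern.
import pytest

BACKENDS = ["gloo", "nccl"]  # stand-in for the real BACKENDS list


@pytest.mark.fsdp1
@pytest.mark.parametrize("backend", BACKENDS)
def test_in_fsdp1_group(backend):
    # A real test would call run_distributed_test(...); this stub only
    # demonstrates how `-m` selection interacts with parametrization.
    assert backend in BACKENDS


@pytest.mark.fsdp2
@pytest.mark.parametrize("backend", BACKENDS)
def test_in_fsdp2_group(backend):
    assert backend in BACKENDS

Running pytest marker_demo.py -m fsdp1 collects only the two fsdp1 parametrizations, -m fsdp2 only the other two, and -m 'fsdp1 or fsdp2' all four.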
