try mac runner
epwalsh committed Apr 2, 2024
1 parent 701369a commit 95e49f2
Showing 4 changed files with 6 additions and 15 deletions.
3 changes: 2 additions & 1 deletion .github/actions/setup-venv/action.yml
@@ -42,7 +42,8 @@ runs:
       # Set up virtual environment without cache hit.
       test -d .venv || virtualenv -p $(which python) --copies --reset-app-data .venv
       . .venv/bin/activate
-      pip install 'torch${{ inputs.torch-version }}' --extra-index-url https://download.pytorch.org/whl/cpu
+      #pip install 'torch${{ inputs.torch-version }}' --extra-index-url https://download.pytorch.org/whl/cpu
+      pip install 'torch${{ inputs.torch-version }}'
       pip install -e .[all]
     - if: steps.virtualenv-cache.outputs.cache-hit == 'true'
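Note (context, not part of the diff): https://download.pytorch.org/whl/cpu serves Linux-oriented CPU-only wheels, while the default PyPI torch wheels for macOS are already CPU builds, so the extra index can be dropped on a macOS runner. A minimal sketch of what the new install step resolves to, assuming a hypothetical torch-version input of '==2.2.2':

    # inputs.torch-version assumed to be '==2.2.2' (hypothetical value)
    pip install 'torch==2.2.2'   # default PyPI index suffices on macOS runners
    pip install -e .[all]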
9 changes: 4 additions & 5 deletions .github/workflows/main.yml
@@ -24,25 +24,24 @@ env:
 jobs:
   checks:
     name: ${{ matrix.task.name }}
-    runs-on: [ubuntu-latest]
+    runs-on: ${{ matrix.runner }}
     timeout-minutes: 5
     strategy:
       fail-fast: false
       matrix:
         python: ['3.10']
+        runner: [macos-13]  # TODO: change to ubuntu-latest once repo is public (will have more RAM then)
         task:
           - name: Lint
             run: make lint-check

           - name: Test core
             run: |
-              pytest -v --color=yes --durations=3 src/test/ --ignore-glob='src/test/distributed/*'
-              pytest -v --color=yes --durations=3 src/test/distributed/ --ignore-glob='src/test/distributed/fsdp/*'
+              pytest -v --color=yes --durations=3 src/test/ --ignore-glob='src/test/distributed/fsdp/*'

           - name: Test FSDP
             run: |
-              pytest -v --color=yes --durations=3 src/test/distributed/fsdp/ -m 'fsdp1'
-              pytest -v --color=yes --durations=3 -n=1 src/test/distributed/fsdp/ -m 'fsdp2'
+              pytest -v --color=yes --durations=3 src/test/distributed/fsdp/

           - name: Type check
             run: make type-check
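Note (context, not part of the diff): the old 'Test FSDP' step split the suite by the fsdp1/fsdp2 markers and ran the second half under pytest-xdist (-n=1 means a single worker process), presumably to bound memory use; on the macos-13 runner the whole directory can run in one invocation:

    pytest -v --color=yes --durations=3 src/test/distributed/fsdp/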
2 changes: 0 additions & 2 deletions pyproject.toml
@@ -152,8 +152,6 @@ log_cli = false
 log_cli_level = "DEBUG"
 markers = [
     "gpu",
-    "fsdp1",
-    "fsdp2",
 ]
 filterwarnings = [
     'ignore::FutureWarning:huggingface_hub\.file_download',
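Note (context, not part of the diff): pytest only treats names listed under the [tool.pytest.ini_options] markers key as registered, so once fsdp1/fsdp2 are dropped here, any leftover @pytest.mark.fsdp1 decorator would trigger PytestUnknownMarkWarning (or a collection error under --strict-markers) — hence the matching decorator removals in fsdp_test.py below. The surviving gpu marker can still be selected as usual:

    pytest -m 'gpu' src/test/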
7 changes: 0 additions & 7 deletions src/test/distributed/fsdp/fsdp_test.py
@@ -85,7 +85,6 @@ def run_fsdp_against_non_distributed_model(model_factory, model_data_factory):
     )


-@pytest.mark.fsdp1
 @pytest.mark.parametrize("backend", BACKENDS)
 def test_fsdp_against_non_distributed_model(backend, tiny_model_factory, tiny_model_data_factory):
     run_distributed_test(
@@ -159,7 +158,6 @@ def run_fsdp_against_ddp(model_factory, model_data_factory):
     optim.step()


-@pytest.mark.fsdp1
 @pytest.mark.parametrize("backend", BACKENDS)
 def test_fsdp_against_ddp(backend, tiny_model_factory, tiny_model_data_factory):
     run_distributed_test(
@@ -217,7 +215,6 @@ def run_fsdp_with_gradient_accumulation(model_factory, model_data_factory):
     )


-@pytest.mark.fsdp1
 @pytest.mark.parametrize("backend", BACKENDS)
 def test_fsdp_with_gradient_accumulation(backend, tiny_model_factory, tiny_model_data_factory):
     run_distributed_test(
@@ -350,7 +347,6 @@ def forward(self, x):
     loss.backward()


-@pytest.mark.fsdp1
 @pytest.mark.parametrize("backend", BACKENDS)
 def test_nested_fsdp_api(backend, tiny_model_factory, tiny_model_data_factory):
     run_distributed_test(
@@ -387,7 +383,6 @@ def run_fsdp_with_mixed_precision(model_factory, model_data_factory, precision):
     assert param.grad.dtype == param.dtype


-@pytest.mark.fsdp2
 @pytest.mark.parametrize("backend", BACKENDS)
 @pytest.mark.parametrize("precision", FSDP_MIXED_PRECISION)
 def test_fsdp_with_mixed_precision(backend, tiny_model_factory, tiny_model_data_factory, precision):
@@ -436,7 +431,6 @@ def __init__(self):
     assert fsdp.module.fc3.out_proj.max_prefetch_count == 3


-@pytest.mark.fsdp1
 @pytest.mark.parametrize("backend", BACKENDS)
 def test_auto_wrap(backend):
     run_distributed_test(
@@ -497,7 +491,6 @@ def initialize_and_check(m: nn.Module):
     assert (param.data.detach() == 1.1).all()


-@pytest.mark.fsdp1
 @pytest.mark.parametrize("backend", BACKENDS)
 def test_apply(backend):
     run_distributed_test(
