
Commit

Added unit test for scoring with BS_INSTALL_DEPENDENCIES
shehadak committed Nov 2, 2023
1 parent 03d8f1e commit 59f78c3
Showing 1 changed file with 25 additions and 0 deletions.
tests/test_integration.py: 25 additions, 0 deletions
@@ -1,3 +1,4 @@
import os
import pytest
import subprocess
import sys
@@ -53,6 +54,30 @@ def test_score(model_identifier, benchmark_identifier, expected_score):
assert actual_score == expected_score


@pytest.mark.parametrize(
"model_identifier, benchmark_identifier, expected_score, install_dependencies",
[
("randomembedding-100", "Pereira2018.243sentences-linear",
approx(0.0285022, abs=0.0005), "newenv"),
("randomembedding-100", "Pereira2018.243sentences-linear",
approx(0.0285022, abs=0.0005), "yes"),
("randomembedding-100", "Pereira2018.243sentences-linear",
approx(0.0285022, abs=0.0005), "no"),
]
)
def test_score_with_install_dependencies(
model_identifier, benchmark_identifier, expected_score, install_dependencies):
install_dependence_preference = os.environ.get(
"BS_INSTALL_DEPENDENCIES", "yes")
os.environ["BS_INSTALL_DEPENDENCIES"] = install_dependencies
actual_score = score(
model_identifier=model_identifier,
benchmark_identifier=benchmark_identifier,
conda_active=True)
os.environ["BS_INSTALL_DEPENDENCIES"] = install_dependence_preference
assert actual_score == expected_score


def test_commandline_score():
process = subprocess.run(
[
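A note on the restore step in the new test: the previous value is read with a fallback of "yes", so a variable that was unset before the test ends up set to the literal string "yes" afterwards, and the restore line is skipped entirely if score raises. A minimal sketch of the same save/set/restore pattern with try/finally and an explicit unset path, not part of this commit; the helper name is hypothetical and the scoring callable is passed in rather than imported:

import os

def run_with_dependency_preference(score_fn, install_dependencies, **score_kwargs):
    # Remember whether the variable existed at all, not just its value.
    saved = os.environ.get("BS_INSTALL_DEPENDENCIES")
    os.environ["BS_INSTALL_DEPENDENCIES"] = install_dependencies
    try:
        # Same call shape as in the test above; score_kwargs would carry
        # model_identifier, benchmark_identifier, and conda_active.
        return score_fn(**score_kwargs)
    finally:
        # Restore the caller's environment even if score_fn raised.
        if saved is None:
            del os.environ["BS_INSTALL_DEPENDENCIES"]
        else:
            os.environ["BS_INSTALL_DEPENDENCIES"] = saved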
