Jkumwenda/resource file #1406

Open · wants to merge 8 commits into master
@@ -507,7 +507,7 @@ def run():

# Basic arguments required for the simulation
start_date = Date(2010, 1, 1)
end_date = Date(2050, 1, 1)
end_date = Date(2011, 1, 1)
pop_size = 20000

# This creates the Simulation instance for this run. Because we've passed the `seed` and
52 changes: 33 additions & 19 deletions src/tlo/dependencies.py
@@ -4,6 +4,7 @@
import inspect
import os
import pkgutil
from pathlib import Path
from typing import Any, Callable, Generator, Iterable, Mapping, Optional, Set, Type, Union

import tlo.methods
@@ -77,6 +78,8 @@ def get_all_required_dependencies(
def topologically_sort_modules(
module_instances: Iterable[Module],
get_dependencies: DependencyGetter = get_init_dependencies,
data_folder: Path = None,
auto_register_modules: bool = False
) -> Generator[Module, None, None]:
"""Generator which yields topological sort of modules based on their dependencies.

@@ -91,6 +94,9 @@ def topologically_sort_modules(
:param get_dependencies: Function which given a module gets the set of module
dependencies. Defaults to returning the ``Module.INIT_DEPENDENCIES`` class
attribute.
:param data_folder: Resource files folder.
:param auto_register_modules: Whether to register missing modules or not. Any missing
modules will be registered with default values for their initialiser arguments.

:raises ModuleDependencyError: Raised when a module dependency is missing from
``module_instances`` or a module has circular dependencies.
@@ -120,29 +126,37 @@ def depth_first_search(module):
dependencies = get_dependencies(
module_instance_map[module], module_instance_map.keys()
)

module_class_map = get_module_class_map(set())
for dependency in sorted(dependencies):
if dependency not in module_instance_map:
alternatives_with_instances = [
name for name, instance in module_instance_map.items()
if dependency in instance.ALTERNATIVE_TO
]
if len(alternatives_with_instances) != 1:
message = (
f'Module {module} depends on {dependency} which is '
'missing from modules to register'
)
if len(alternatives_with_instances) == 0:
message += f' as are any alternatives to {dependency}.'
else:
message += (
' and there are multiple alternatives '
f'({alternatives_with_instances}) so which '
'to use to resolve dependency is ambiguous.'
if auto_register_modules:
# add missing dependencies and associated classes in module instance map dictionary
module_instance = module_class_map[dependency](resourcefilepath=data_folder)
module_instance_map[dependency] = module_instance
yield from depth_first_search(dependency)
else:
alternatives_with_instances = [
name for name, instance in module_instance_map.items()
if dependency in instance.ALTERNATIVE_TO
]
if len(alternatives_with_instances) != 1:
message = (
f'Module {module} depends on {dependency} which is '
'missing from modules to register'
)
raise ModuleDependencyError(message)
if len(alternatives_with_instances) == 0:
message += f' as are any alternatives to {dependency}.'
else:
message += (
' and there are multiple alternatives '
f'({alternatives_with_instances}) so which '
'to use to resolve dependency is ambiguous.'
)
raise ModuleDependencyError(message)

else:
yield from depth_first_search(alternatives_with_instances[0])
else:
yield from depth_first_search(alternatives_with_instances[0])

else:
yield from depth_first_search(dependency)
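To make the intended behaviour of the new `auto_register_modules` branch concrete, here is a minimal sketch of calling the sorter directly. It is an illustration only: it assumes `Lifestyle` lists `Demography` in its `INIT_DEPENDENCIES`, that the resource files live under `./resources`, and that every auto-registered module class accepts `resourcefilepath` as its sole required initialiser argument, which is what the `module_class_map[dependency](resourcefilepath=data_folder)` call above relies on.

```python
from pathlib import Path

from tlo.dependencies import topologically_sort_modules
from tlo.methods import enhanced_lifestyle

resources = Path("./resources")  # assumed location of the resource files

# Only Lifestyle is supplied; with auto_register_modules=True the sorter is
# expected to instantiate its missing init-time dependencies (e.g. Demography)
# itself, using data_folder as their resourcefilepath, and yield them first.
modules = list(
    topologically_sort_modules(
        [enhanced_lifestyle.Lifestyle(resourcefilepath=resources)],
        data_folder=resources,
        auto_register_modules=True,
    )
)
print([type(m).__name__ for m in modules])  # dependencies appear before Lifestyle
```

If a dependency's class needs constructor arguments beyond `resourcefilepath`, the default-value assumption breaks down and that module would still have to be passed in explicitly.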
3 changes: 2 additions & 1 deletion src/tlo/methods/enhanced_lifestyle.py
@@ -343,7 +343,8 @@ def __init__(self, name=None, resourcefilepath=None):
def read_parameters(self, data_folder):
p = self.parameters
dataframes = pd.read_excel(
Path(self.resourcefilepath) / 'ResourceFile_Lifestyle_Enhanced.xlsx',
self.sim.read_resources('ResourceFile_Lifestyle_Enhanced.xlsx'),
# simulation.resource_file_path('ResourceFile_Lifestyle_Enhanced.xlsx'),
sheet_name=["parameter_values", "urban_rural_by_district"],
)
self.load_parameters_from_dataframe(dataframes["parameter_values"])
17 changes: 13 additions & 4 deletions src/tlo/simulation.py
@@ -1,5 +1,5 @@
"""The main simulation controller."""

import os
import datetime
import heapq
import itertools
@@ -44,7 +44,7 @@ class Simulation:
"""

def __init__(self, *, start_date: Date, seed: int = None, log_config: dict = None,
show_progress_bar=False):
show_progress_bar=False, data_folder: Path = None):
"""Create a new simulation.

:param start_date: the date the simulation begins; must be given as
@@ -53,6 +53,7 @@ def __init__(self, *, start_date: Date, seed: int = None, log_config: dict = Non
:param log_config: sets up the logging configuration for this simulation
:param show_progress_bar: whether to show a progress bar instead of the logger
output during the simulation
:param data_folder: path to resource files folder
"""
# simulation
self.date = self.start_date = start_date
@@ -63,6 +64,7 @@ def __init__(self, *, start_date: Date, seed: int = None, log_config: dict = Non
self.population: Optional[Population] = None

self.show_progress_bar = show_progress_bar
self.data_folder = data_folder

# logging
if log_config is None:
@@ -125,7 +127,7 @@ def log_filepath(self):
"""The path to the log file, if one has been set."""
return self._log_filepath

def register(self, *modules, sort_modules=True, check_all_dependencies=True):
def register(self, *modules, sort_modules=True, check_all_dependencies=True, auto_register_modules: bool = False):
"""Register one or more disease modules with the simulation.

:param modules: the disease module(s) to use as part of this simulation.
@@ -143,9 +145,11 @@ def register(self, *modules, sort_modules=True, check_all_dependencies=True):
``ADDITIONAL_DEPENDENCIES`` attributes) have been included in the set of
modules to be registered. A ``ModuleDependencyError`` exception will
be raised if there are missing dependencies.
:param auto_register_modules: whether to register missing modules or not
"""
if sort_modules:
modules = list(topologically_sort_modules(modules))
modules = list(topologically_sort_modules(modules, data_folder=self.data_folder,
auto_register_modules=auto_register_modules))
if check_all_dependencies:
check_dependencies_present(modules)
# Iterate over modules and per-module seed sequences spawned from simulation
@@ -308,6 +312,11 @@ def find_events_for_person(self, person_id: int):

return person_events

def read_resources(self, *args):
"""The path to the resourcefile"""
self.resourcefilepath = Path('./resources')
lastfile = os.path.join(self.resourcefilepath, str(*args))
return lastfile

class EventQueue:
"""A simple priority queue for events.
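Putting the `simulation.py` changes together, the new arguments would presumably be used along these lines. This is a sketch under stated assumptions: the resource files are taken to live under `./resources` (the path `read_resources` hard-codes above), the module choice is illustrative, and `check_all_dependencies` is switched off because only init-time dependencies are auto-registered by the sorter.

```python
from pathlib import Path

from tlo import Date, Simulation
from tlo.methods import enhanced_lifestyle

resources = Path("./resources")  # assumed resource-file location

sim = Simulation(start_date=Date(2010, 1, 1), seed=0, data_folder=resources)

# Lifestyle's dependencies are not passed explicitly; with
# auto_register_modules=True they should be created during topological sorting
# with resourcefilepath=data_folder. Only INIT_DEPENDENCIES are handled, so the
# full-dependency check is skipped here.
sim.register(
    enhanced_lifestyle.Lifestyle(resourcefilepath=resources),
    auto_register_modules=True,
    check_all_dependencies=False,
)

# read_resources joins its argument onto the hard-coded './resources' folder;
# this is the call enhanced_lifestyle.read_parameters now makes.
print(sim.read_resources("ResourceFile_Lifestyle_Enhanced.xlsx"))
```

Note that `read_resources` ignores `data_folder` and always resolves against `./resources`, so this sketch only holds when the two point at the same directory.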
80 changes: 52 additions & 28 deletions tests/test_analysis.py
@@ -25,8 +25,9 @@
summarize,
unflatten_flattened_multi_index_in_logging,
)
from tlo.dependencies import ModuleDependencyError
from tlo.events import PopulationScopeEventMixin, RegularEvent
from tlo.methods import demography
from tlo.methods import copd, demography
from tlo.methods.fullmodel import fullmodel
from tlo.methods.scenario_switcher import ImprovedHealthSystemAndCareSeekingScenarioSwitcher

@@ -88,7 +89,7 @@ def initialise_simulation(self, sim: Simulation):

# At INFO level
assert (
len(output["tlo.methods.dummy"]["_metadata"]["tlo.methods.dummy"]) == 2
len(output["tlo.methods.dummy"]["_metadata"]["tlo.methods.dummy"]) == 2
) # should have two tables

# tables should be at level INFO
@@ -148,7 +149,7 @@ def initialise_simulation(self, sim):
sim = Simulation(
start_date=sim_start_date,
seed=0,
log_config={"filename": "temp", "directory": tmpdir,},
log_config={"filename": "temp", "directory": tmpdir, },
)
sim.register(
demography.Demography(resourcefilepath=resourcefilepath), DummyModule()
@@ -368,7 +369,7 @@ def test_get_parameter_functions(seed):

# Check that the parameter identified exists in the simulation
assert (
name in sim.modules[module].parameters
name in sim.modules[module].parameters
), f"Parameter not recognised: {module}:{name}."

# Check that the original value and the updated value are of the same type.
@@ -412,9 +413,9 @@ def is_list_same_size_and_dtype(l1, l2):
def test_mix_scenarios():
"""Check that `mix_scenarios` works as expected."""

d1 = {"Mod1": {"param_a": "value_in_d1", "param_b": "value_in_d1",}}
d1 = {"Mod1": {"param_a": "value_in_d1", "param_b": "value_in_d1", }}

d2 = {"Mod2": {"param_a": "value_in_d2", "param_b": "value_in_d2",}}
d2 = {"Mod2": {"param_a": "value_in_d2", "param_b": "value_in_d2", }}

d3 = {"Mod1": {"param_b": "value_in_d3", "param_c": "value_in_d3"}}

@@ -433,8 +434,8 @@ def test_mix_scenarios():

assert 1 == len(record)
assert (
record.list[0].message.args[0]
== "Parameter is being updated more than once: module=Mod1, parameter=param_b"
record.list[0].message.args[0]
== "Parameter is being updated more than once: module=Mod1, parameter=param_b"
)

# Test the behaviour of the `mix_scenarios` taking the value in the right-most dict.
@@ -460,15 +461,15 @@ def test_mix_scenarios():
"param_c": "value_in_dict3",
}
},
{"Mod1": {"param_a": "value_in_dict_right_most", "param_c": "value_in_dict4",}},
{"Mod1": {"param_c": "value_in_dict_right_most",}},
{"Mod1": {"param_a": "value_in_dict_right_most", "param_c": "value_in_dict4", }},
{"Mod1": {"param_c": "value_in_dict_right_most", }},
) == {
"Mod1": {
"param_a": "value_in_dict_right_most",
"param_b": "value_in_dict_right_most",
"param_c": "value_in_dict_right_most",
}
}
"Mod1": {
"param_a": "value_in_dict_right_most",
"param_b": "value_in_dict_right_most",
"param_c": "value_in_dict_right_most",
}
}


def test_improved_healthsystem_and_care_seeking_scenario_switcher(seed):
Expand Down Expand Up @@ -538,24 +539,23 @@ def check_parameters(self) -> None:
hcs = sim.modules["HealthSeekingBehaviour"].force_any_symptom_to_lead_to_healthcareseeking
assert isinstance(hcs, bool) and (hcs is max_healthcare_seeking[phase_of_simulation])


sim = Simulation(start_date=Date(2010, 1, 1), seed=seed)
sim.register(
*(
fullmodel(resourcefilepath=resourcefilepath)
+ [
ImprovedHealthSystemAndCareSeekingScenarioSwitcher(
resourcefilepath=resourcefilepath
),
DummyModule(),
]
fullmodel(resourcefilepath=resourcefilepath)
+ [
ImprovedHealthSystemAndCareSeekingScenarioSwitcher(
resourcefilepath=resourcefilepath
),
DummyModule(),
]
)
)

# Check that the `ImprovedHealthSystemAndCareSeekingScenarioSwitcher` is the first registered module.
assert (
"ImprovedHealthSystemAndCareSeekingScenarioSwitcher"
== list(sim.modules.keys())[0]
"ImprovedHealthSystemAndCareSeekingScenarioSwitcher"
== list(sim.modules.keys())[0]
)
module = sim.modules["ImprovedHealthSystemAndCareSeekingScenarioSwitcher"]

@@ -586,7 +586,7 @@ def test_summarize():
names=("draw", "run"),
),
index=["TimePoint0", "TimePoint1"],
data=np.array([[0, 20, 1000, 2000], [0, 20, 1000, 2000],]),
data=np.array([[0, 20, 1000, 2000], [0, 20, 1000, 2000], ]),
)

results_one_draw = pd.DataFrame(
@@ -637,7 +637,31 @@ def test_summarize():
pd.DataFrame(
columns=pd.Index(["lower", "mean", "upper"], name="stat"),
index=["TimePoint0", "TimePoint1"],
data=np.array([[0.5, 10.0, 19.5], [0.5, 10.0, 19.5],]),
data=np.array([[0.5, 10.0, 19.5], [0.5, 10.0, 19.5], ]),
),
summarize(results_one_draw, collapse_columns=True),
)


def test_auto_register_modules(tmpdir):
""" check module dependencies can be registered automatically """
start_date = Date(2010, 1, 1)
# configure logging
log_config = {
"filename": "LogFile",
"directory": tmpdir,
}
sim = Simulation(start_date=start_date, seed=0, log_config=log_config, data_folder=resourcefilepath)
try:
# try executing the code in this block and go to except block if module dependency error exception is fired

# register modules without their associated dependencies
sim.register(demography.Demography(resourcefilepath=resourcefilepath),
copd.Copd(resourcefilepath=resourcefilepath),
auto_register_modules=True)

except ModuleDependencyError as exception:
# if auto register modules argument is false, there should be a module dependency error exception fired
assert exception
assert exception.__class__ == ModuleDependencyError

2 changes: 1 addition & 1 deletion tests/test_enhanced_lifestyle.py
@@ -64,7 +64,7 @@ def test_properties_and_dtypes(simulation):
df = simulation.population.props
orig = simulation.population.new_row
assert (df.dtypes == orig.dtypes).all()

print(f'Function Output {simulation.resource_file_path()}')

def test_assign_rural_urban_by_district(simulation):
""" test linear model integrity in assigning individual rural urban status based on their districts """