260 remove v03 code and tests from v04 beta branch (#272)
* clean up old v0.3 tests and data

* removes use of eval as a variable name in favor of ev (see the note below)
mgdenno authored Oct 8, 2024
1 parent 134ba66 commit 3fe6273
Showing 154 changed files with 224 additions and 21,838 deletions.
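Note on the second bullet: eval is a Python built-in, so using it as a parameter or attribute name shadows the built-in within that scope and trips common linters. A minimal illustration of the problem, independent of the TEEHR code itself:

# Illustration only (not TEEHR code): why `eval` is a poor variable name.

def shadowed(eval):
    # Inside this function, `eval` refers to the argument, not the built-in;
    # calling eval("1 + 1") here would fail unless the argument is callable.
    return eval

def renamed(ev):
    # With a distinct name, the built-in stays available.
    return eval("1 + 1")  # -> 2

print(renamed(None))  # prints 2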
34 changes: 17 additions & 17 deletions src/teehr/evaluation/fetch.py
@@ -44,34 +44,34 @@
class Fetch:
"""Component class for fetching data from external sources."""

def __init__(self, eval) -> None:
def __init__(self, ev) -> None:
"""Initialize the Fetch class."""
# Now we have access to the Evaluation object.
self.eval = eval
self.ev = ev
self.usgs_cache_dir = Path(
eval.cache_dir,
ev.cache_dir,
const.FETCHING_CACHE_DIR,
const.USGS_CACHE_DIR,
)
self.nwm_cache_dir = Path(
eval.cache_dir,
ev.cache_dir,
const.FETCHING_CACHE_DIR,
const.NWM_CACHE_DIR
)
self.kerchunk_cache_dir = Path(
eval.cache_dir,
ev.cache_dir,
const.FETCHING_CACHE_DIR,
const.KERCHUNK_DIR
)
self.weights_cache_dir = Path(
eval.cache_dir,
ev.cache_dir,
const.FETCHING_CACHE_DIR,
const.WEIGHTS_DIR
)

def _get_secondary_location_ids(self, prefix: str) -> List[str]:
"""Get the secondary location IDs corresponding to primary IDs."""
lcw_df = self.eval.location_crosswalks.query(
lcw_df = self.ev.location_crosswalks.query(
filters={
"column": "secondary_location_id",
"operator": "like",
@@ -102,7 +102,7 @@ def usgs_streamflow(
"""Fetch USGS gage data and save as a Parquet file."""
logger.info("Getting primary location IDs.")
if sites is None:
locations_gdf = self.eval.locations.query(
locations_gdf = self.ev.locations.query(
filters={
"column": "id",
"operator": "like",
@@ -132,7 +132,7 @@ def usgs_streamflow(
)

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
in_path=Path(
self.usgs_cache_dir
),
@@ -181,11 +181,11 @@ def nwm_retrospective_points(
)

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
in_path=Path(
self.nwm_cache_dir
),
# dataset_path=self.eval.dataset_dir,
# dataset_path=self.ev.dataset_dir,
timeseries_type=timeseries_type,
)

@@ -227,11 +227,11 @@ def nwm_retrospective_grids(
)

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
in_path=Path(
self.nwm_cache_dir
),
# dataset_path=self.eval.dataset_dir,
# dataset_path=self.ev.dataset_dir,
timeseries_type=timeseries_type,
)

@@ -289,11 +289,11 @@ def nwm_forecast_points(
)

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
in_path=Path(
self.nwm_cache_dir
),
# dataset_path=self.eval.dataset_dir,
# dataset_path=self.ev.dataset_dir,
timeseries_type=timeseries_type,
)

@@ -350,10 +350,10 @@ def nwm_forecast_grids(
pass

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
in_path=Path(
self.nwm_cache_dir
),
# dataset_path=self.eval.dataset_dir,
# dataset_path=self.ev.dataset_dir,
timeseries_type=timeseries_type,
)
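The fetch.py changes above are mechanical: Fetch keeps a reference to the parent Evaluation object (now stored as self.ev) and derives its cache directories from ev.cache_dir. A minimal sketch of that delegation pattern under the new naming; the "fetching"/"usgs" strings and the Evaluation class shown here are illustrative stand-ins, not the real TEEHR definitions:

from pathlib import Path


class Fetch:
    """Component that holds a reference back to its parent Evaluation."""

    def __init__(self, ev) -> None:
        self.ev = ev
        # Mirrors usgs_cache_dir in the diff; "fetching"/"usgs" stand in
        # for const.FETCHING_CACHE_DIR / const.USGS_CACHE_DIR.
        self.usgs_cache_dir = Path(ev.cache_dir, "fetching", "usgs")


class Evaluation:
    """Illustrative parent object; the real class has many more components."""

    def __init__(self, cache_dir: str) -> None:
        self.cache_dir = cache_dir
        # The component receives the Evaluation instance itself ("ev"),
        # so Fetch can later reach sibling tables such as ev.locations.
        self.fetch = Fetch(self)


ev = Evaluation(cache_dir="/tmp/teehr_cache")
print(ev.fetch.usgs_cache_dir)  # /tmp/teehr_cache/fetching/usgs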
26 changes: 13 additions & 13 deletions src/teehr/evaluation/load.py
@@ -39,11 +39,11 @@
class Load:
"""Component class for loading data into the dataset."""

def __init__(self, eval) -> None:
def __init__(self, ev) -> None:
"""Initialize the Load class."""
self.eval = eval
self.cache_dir = eval.cache_dir
self.dataset_dir = eval.dataset_dir
self.ev = ev
self.cache_dir = ev.cache_dir
self.dataset_dir = ev.dataset_dir
self.locations_cache_dir = Path(
self.cache_dir,
const.LOADING_CACHE_DIR,
@@ -89,7 +89,7 @@ def add_configuration(
>>> type="primary",
>>> description="USGS observations",
>>> )
>>> eval.load.add_configuration(configuration)
>>> ev.load.add_configuration(configuration)
"""
add_configuration(self.dataset_dir, configuration)
@@ -112,7 +112,7 @@ def add_unit(
>>> name="m^3/s",
>>> long_name="Cubic meters per second"
>>> )
>>> eval.load.add_unit(unit)
>>> ev.load.add_unit(unit)
"""
add_unit(self.dataset_dir, unit)

@@ -134,7 +134,7 @@ def add_variable(
>>> name="streamflow_hourly_inst",
>>> long_name="Instantaneous streamflow"
>>> )
>>> eval.load.add_variable(variable)
>>> ev.load.add_variable(variable)
"""
add_variable(self.dataset_dir, variable)

@@ -157,7 +157,7 @@ def add_attribute(
>>> type="continuous",
>>> description="Drainage area in square kilometers"
>>> )
>>> eval.load.add_attribute(attribute)
>>> ev.load.add_attribute(attribute)
"""
add_attribute(self.dataset_dir, attribute)

@@ -200,7 +200,7 @@ def import_locations(
**kwargs
)
validate_and_insert_locations(
ev=self.eval,
ev=self.ev,
in_path=self.locations_cache_dir
# self.dataset_dir
)
@@ -241,7 +241,7 @@ def import_location_crosswalks(
**kwargs
)
validate_and_insert_location_crosswalks(
self.eval,
self.ev,
self.crosswalk_cache_dir,
)

@@ -282,7 +282,7 @@ def import_location_attributes(
**kwargs
)
validate_and_insert_location_attributes(
self.eval,
self.ev,
self.attributes_cache_dir,
)

@@ -339,7 +339,7 @@ def import_secondary_timeseries(
pattern = pattern.replace(".csv", ".parquet")

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
in_path=self.secondary_cache_dir,
# dataset_path=self.dataset_dir,
timeseries_type="secondary",
@@ -399,7 +399,7 @@ def import_primary_timeseries(
pattern = pattern.replace(".csv", ".parquet")

validate_and_insert_timeseries(
ev=self.eval,
ev=self.ev,
# dataset_path=self.dataset_dir,
in_path=self.primary_cache_dir,
timeseries_type="primary",
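The load.py docstrings above now reference ev.load rather than eval.load. Pulled together, the updated doctest fragments read roughly as follows; the Unit import path is not shown in this diff, and ev is assumed to be an existing Evaluation instance:

>>> unit = Unit(                     # Unit's import is not shown in this diff
>>>     name="m^3/s",
>>>     long_name="Cubic meters per second"
>>> )
>>> ev.load.add_unit(unit)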
10 changes: 5 additions & 5 deletions src/teehr/evaluation/metrics.py
@@ -27,12 +27,12 @@
class Metrics:
"""Component class for calculating metrics."""

def __init__(self, eval) -> None:
def __init__(self, ev) -> None:
"""Initialize the Metrics class."""
self.spark = eval.spark
self.dataset_dir = eval.dataset_dir
self.locations = eval.locations
self.joined_timeseries = eval.joined_timeseries
self.spark = ev.spark
self.dataset_dir = ev.dataset_dir
self.locations = ev.locations
self.joined_timeseries = ev.joined_timeseries
self.df = self.joined_timeseries.to_sdf()

def query(
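metrics.py follows the same pattern, but instead of storing the whole Evaluation it copies the references it needs (spark, dataset_dir, locations, joined_timeseries) from ev at construction time and materializes a Spark DataFrame via .to_sdf(). A rough sketch of that construction with a stand-in object in place of a real Evaluation and Spark session (illustrative only):

from types import SimpleNamespace


class Metrics:
    """Copies the references it needs from the parent Evaluation (ev)."""

    def __init__(self, ev) -> None:
        self.spark = ev.spark
        self.dataset_dir = ev.dataset_dir
        self.locations = ev.locations
        self.joined_timeseries = ev.joined_timeseries
        # In TEEHR this yields a Spark DataFrame; the stand-in below
        # just returns a placeholder string.
        self.df = self.joined_timeseries.to_sdf()


# Stand-in with only the attributes Metrics reads.
ev = SimpleNamespace(
    spark=None,
    dataset_dir="/tmp/dataset",
    locations=None,
    joined_timeseries=SimpleNamespace(to_sdf=lambda: "spark-dataframe-placeholder"),
)
print(Metrics(ev).df)  # spark-dataframe-placeholder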
[Remaining 151 changed files not shown]
