Merge pull request #122 from cmutel/platformdirs
Refactor user data directories with `platformdirs`
romainsacchi committed Oct 10, 2023
2 parents b0691d8 + c355b28 commit 3933a13
Showing 29 changed files with 218 additions and 154 deletions.
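The diff below moves `premise`'s user-generated cache and log locations from package-relative folders to per-user directories resolved by `platformdirs`. As a rough sketch (not part of the commit), this is what those calls resolve to; the printed paths are indicative and depend on the platform and `platformdirs` version:

```python
from pathlib import Path

import platformdirs

# Resolve per-user writable directories following OS conventions
# (XDG dirs on Linux, ~/Library on macOS, %LOCALAPPDATA% on Windows).
data_dir: Path = platformdirs.user_data_path(appname="premise", appauthor="pylca")
log_dir: Path = platformdirs.user_log_path(appname="premise", appauthor="pylca")

print(data_dir)  # e.g. ~/.local/share/premise on Linux
print(log_dir)   # e.g. ~/.local/state/premise/log on Linux (varies by version)
```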
23 changes: 12 additions & 11 deletions conda/meta.yaml
@@ -18,23 +18,24 @@ requirements:
- python
- setuptools
run:
- bottleneck
- bw2data
- bw2io >=0.8.10
- constructive_geometries>=0.8.2
- cryptography
- datapackage
- numpy
- pandas
- bw2io >=0.8.10
- bw2data
- wurst
- xarray
- platformdirs
- prettytable
- pyarrow
- pycountry
- cryptography
- pyYaml
- sparse>=0.14.0
- schema
- datapackage
- requests
- bottleneck
- constructive_geometries>=0.8.2
- pyarrow
- schema
- sparse>=0.14.0
- wurst
- xarray

test:
imports:
5 changes: 0 additions & 5 deletions premise/__init__.py
@@ -1,11 +1,6 @@
__all__ = ("NewDatabase", "clear_cache", "get_regions_definition")
__version__ = (1, 7, 6)

from pathlib import Path

DATA_DIR = Path(__file__).resolve().parent / "data"
INVENTORY_DIR = Path(__file__).resolve().parent / "data" / "additional_inventories"
VARIABLES_DIR = Path(__file__).resolve().parent / "iam_variables_mapping"

from .ecoinvent_modification import NewDatabase
from .utils import clear_cache, get_regions_definition
3 changes: 1 addition & 2 deletions premise/activity_maps.py
@@ -3,15 +3,14 @@
mapping between ``premise`` and ``ecoinvent`` terminology.
"""

import sys
from collections import defaultdict
from pathlib import Path
from typing import List, Union

import yaml
from wurst import searching as ws

from . import DATA_DIR, VARIABLES_DIR
from .filesystem_constants import DATA_DIR, VARIABLES_DIR

POWERPLANT_TECHS = VARIABLES_DIR / "electricity_variables.yaml"
FUELS_TECHS = VARIABLES_DIR / "fuels_variables.yaml"
5 changes: 0 additions & 5 deletions premise/cement.py
@@ -8,15 +8,10 @@
"""

import logging.config
from collections import defaultdict
from pathlib import Path

import yaml

from .logger import create_logger
from .transformation import BaseTransformation, Dict, IAMDataCollection, List, np, ws
from .utils import DATA_DIR

logger = create_logger("cement")

2 changes: 1 addition & 1 deletion premise/clean_datasets.py
@@ -14,8 +14,8 @@
from bw2data.database import DatabaseChooser
from wurst import searching as ws

from . import DATA_DIR
from .data_collection import get_delimiter
from .filesystem_constants import DATA_DIR


def remove_uncertainty(database):
8 changes: 3 additions & 5 deletions premise/data_collection.py
@@ -19,7 +19,7 @@
import yaml
from cryptography.fernet import Fernet

from . import DATA_DIR, VARIABLES_DIR
from .filesystem_constants import DATA_DIR, IAM_OUTPUT_DIR, VARIABLES_DIR
from .marginal_mixes import consequential_method

IAM_ELEC_VARS = VARIABLES_DIR / "electricity_variables.yaml"
@@ -30,11 +30,9 @@
IAM_STEEL_VARS = VARIABLES_DIR / "steel_variables.yaml"
IAM_DAC_VARS = VARIABLES_DIR / "direct_air_capture_variables.yaml"
IAM_OTHER_VARS = VARIABLES_DIR / "other_variables.yaml"
FILEPATH_FLEET_COMP = (
DATA_DIR / "iam_output_files" / "fleet_files" / "fleet_all_vehicles.csv"
)
FILEPATH_FLEET_COMP = IAM_OUTPUT_DIR / "fleet_files" / "fleet_all_vehicles.csv"
FILEPATH_IMAGE_TRUCKS_FLEET_COMP = (
DATA_DIR / "iam_output_files" / "fleet_files" / "image_fleet_trucks.csv"
IAM_OUTPUT_DIR / "fleet_files" / "image_fleet_trucks.csv"
)
VEHICLES_MAP = DATA_DIR / "transport" / "vehicles_map.yaml"
IAM_CARBON_CAPTURE_VARS = VARIABLES_DIR / "carbon_capture_variables.yaml"
13 changes: 4 additions & 9 deletions premise/direct_air_capture.py
@@ -3,20 +3,13 @@
"""

import copy
import logging.config
from pathlib import Path

import numpy as np
import wurst
import yaml

from .filesystem_constants import DATA_DIR
from .logger import create_logger
from .utils import DATA_DIR

logger = create_logger("dac")


import numpy as np

from .transformation import (
BaseTransformation,
IAMDataCollection,
@@ -26,6 +19,8 @@
ws,
)

logger = create_logger("dac")

HEAT_SOURCES = DATA_DIR / "fuels" / "heat_sources_map.yml"


18 changes: 7 additions & 11 deletions premise/ecoinvent_modification.py
@@ -18,7 +18,7 @@
import datapackage
import yaml

from . import DATA_DIR, INVENTORY_DIR, __version__
from . import __version__
from .cement import _update_cement
from .clean_datasets import DatabaseCleaner
from .data_collection import IAMDataCollection
@@ -34,6 +34,7 @@
)
from .external import ExternalScenario
from .external_data_validation import check_external_scenarios, check_inventories
from .filesystem_constants import DATA_DIR, DIR_CACHED_DB, IAM_OUTPUT_DIR, INVENTORY_DIR
from .fuels import _update_fuels
from .inventory_imports import AdditionalInventory, DefaultInventory
from .report import generate_change_report, generate_summary_report
@@ -51,8 +52,6 @@
write_brightway2_database,
)

DIR_CACHED_DB = DATA_DIR / "cache"

FILEPATH_OIL_GAS_INVENTORIES = INVENTORY_DIR / "lci-ESU-oil-and-gas.xlsx"
FILEPATH_CARMA_INVENTORIES = INVENTORY_DIR / "lci-Carma-CCS.xlsx"
FILEPATH_CO_FIRING_INVENTORIES = INVENTORY_DIR / "lci-co-firing-power-plants.xlsx"
@@ -344,7 +343,8 @@ def check_scenarios(scenario: dict, key: bytes) -> dict:
filepath = scenario["filepath"]
scenario["filepath"] = check_filepath(filepath)
else:
scenario["filepath"] = DATA_DIR / "iam_output_files"
# Note: A directory path, not a file path
scenario["filepath"] = IAM_OUTPUT_DIR
if key is None:
raise ValueError(
"You need to provide the encryption key to decrypt the IAM output files provided by `premise`."
@@ -633,13 +633,11 @@ def __find_cached_db(self, db_name: str, keep_uncertainty_data: bool) -> List[di
:param db_name: database name
:return: database
"""
# check that directory exists, otherwise create it
Path(DIR_CACHED_DB).mkdir(parents=True, exist_ok=True)
# build file path
if db_name is None and self.source_type == "ecospold":
db_name = f"ecospold_{self.system_model}_{self.version}"

file_name = Path(
file_name = (
DIR_CACHED_DB
/ f"cached_{''.join(tuple(map( str , __version__ )))}_{db_name.strip().lower()}.pickle"
)
Expand All @@ -663,13 +661,11 @@ def __find_cached_inventories(self, db_name: str) -> Union[None, List[dict]]:
:param db_name: database name
:return: database
"""
# check that directory exists, otherwise create it
Path(DIR_CACHED_DB).mkdir(parents=True, exist_ok=True)
# build file path
if db_name is None and self.source_type == "ecospold":
db_name = f"ecospold_{self.system_model}_{self.version}"

file_name = Path(
file_name = (
DIR_CACHED_DB
/ f"cached_{''.join(tuple(map( str , __version__ )))}_{db_name.strip().lower()}_inventories.pickle"
)
@@ -1602,7 +1598,7 @@ def write_datapackage(self, name: str = f"datapackage_{date.today()}"):
cached_inventories = self.__find_cached_inventories(self.source)

if not cached_inventories:
cache_fp = DATA_DIR / "cache" / f"cached_{self.source}_inventories.pickle"
cache_fp = DIR_CACHED_DB / f"cached_{self.source}_inventories.pickle"
raise ValueError(f"No cached inventories found at {cache_fp}.")

cache = {}
5 changes: 2 additions & 3 deletions premise/electricity.py
@@ -13,14 +13,13 @@
import re
from collections import defaultdict
from functools import lru_cache
from pprint import pprint

import wurst
import yaml

from . import VARIABLES_DIR
from .data_collection import get_delimiter
from .export import biosphere_flows_dictionary
from .filesystem_constants import DATA_DIR, VARIABLES_DIR
from .logger import create_logger
from .transformation import (
BaseTransformation,
@@ -35,7 +34,7 @@
uuid,
ws,
)
from .utils import DATA_DIR, eidb_label, get_efficiency_solar_photovoltaics
from .utils import eidb_label, get_efficiency_solar_photovoltaics

LOSS_PER_COUNTRY = DATA_DIR / "electricity" / "losses_per_country.csv"
IAM_BIOMASS_VARS = VARIABLES_DIR / "biomass_variables.yaml"
4 changes: 1 addition & 3 deletions premise/emissions.py
@@ -3,9 +3,7 @@
from GAINS.
"""

import logging.config
from functools import lru_cache
from pathlib import Path
from typing import Union

import numpy as np
@@ -14,6 +12,7 @@
import yaml
from numpy import ndarray

from .filesystem_constants import DATA_DIR
from .logger import create_logger
from .transformation import (
BaseTransformation,
@@ -24,7 +23,6 @@
Set,
ws,
)
from .utils import DATA_DIR

logger = create_logger("emissions")

6 changes: 3 additions & 3 deletions premise/export.py
@@ -11,10 +11,9 @@
import uuid
from collections import defaultdict
from functools import lru_cache
from multiprocessing import Pool as ProcessPool
from multiprocessing.pool import ThreadPool as Pool
from pathlib import Path
from typing import Any, Dict, List, Set, Tuple, Union
from typing import Any, Dict, List

import numpy as np
import pandas as pd
@@ -24,8 +23,9 @@
from pandas import DataFrame
from scipy import sparse as nsp

from . import DATA_DIR, __version__
from . import __version__
from .data_collection import get_delimiter
from .filesystem_constants import DATA_DIR
from .inventory_imports import get_correspondence_bio_flows
from .transformation import BaseTransformation
from .utils import check_database_name
21 changes: 15 additions & 6 deletions premise/external.py
@@ -1,14 +1,23 @@
"""
Implements external scenario data.
"""
import logging
import uuid
from collections import defaultdict
from pathlib import Path
from typing import List, Union


import numpy as np
import wurst
from numpy import ndarray
import xarray as xr
import yaml
from wurst import searching as ws

from .clean_datasets import get_biosphere_flow_uuid
from .data_collection import IAMDataCollection
from .filesystem_constants import DATA_DIR
from .inventory_imports import generate_migration_maps, get_correspondence_bio_flows
from .transformation import *
from .transformation import BaseTransformation, get_shares_from_production_volume
from .utils import eidb_label

LOG_CONFIG = DATA_DIR / "utils" / "logging" / "logconfig.yaml"
@@ -679,14 +688,14 @@ def check_existence_of_market_suppliers(self):

def fetch_supply_share(
self, i: int, region: str, var: str, variables: list
) -> ndarray:
) -> np.ndarray:
"""
Return the supply share of a given variable in a given region.
:param i: index of the scenario
:param region: region
:param var: variable
:param variables: list of all variables
:return: ndarray
:return: np.ndarray
"""

return np.clip(
@@ -858,7 +867,7 @@ def adjust_efficiency_of_new_markets(
ineff["variable"], region, eff_data, self.year
)

if not "includes" in ineff:
if "includes" not in ineff:
wurst.change_exchanges_by_constant_factor(datatset, scaling_factor)

else:
4 changes: 0 additions & 4 deletions premise/external_data_validation.py
@@ -1,10 +1,6 @@
"""
Validates datapackages that contain external scenario data.
"""

import sys
from pprint import pprint

import numpy as np
import pandas as pd
import yaml
20 changes: 20 additions & 0 deletions premise/filesystem_constants.py
@@ -0,0 +1,20 @@
from pathlib import Path

import platformdirs

# Directories for data which comes with Premise
DATA_DIR = Path(__file__).resolve().parent / "data"
INVENTORY_DIR = DATA_DIR / "additional_inventories"
# Todo: Should this be in the data directory?
VARIABLES_DIR = Path(__file__).resolve().parent / "iam_variables_mapping"
IAM_OUTPUT_DIR = DATA_DIR / "iam_output_files"

# Directories for user-created data
USER_DATA_BASE_DIR = platformdirs.user_data_path(appname="premise", appauthor="pylca")
USER_DATA_BASE_DIR.mkdir(parents=True, exist_ok=True)

DIR_CACHED_DB = USER_DATA_BASE_DIR / "cache"
DIR_CACHED_DB.mkdir(parents=True, exist_ok=True)

USER_LOGS_DIR = platformdirs.user_log_path(appname="premise", appauthor="pylca")
USER_LOGS_DIR.mkdir(parents=True, exist_ok=True)
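
Illustrative only, not part of the commit: downstream modules now import these constants from `filesystem_constants` instead of deriving paths from `__file__` or hard-coding `data/cache`. The pickle file name below is a hypothetical example:

```python
from premise.filesystem_constants import DATA_DIR, DIR_CACHED_DB

# Bundled data still lives inside the package ...
losses_csv = DATA_DIR / "electricity" / "losses_per_country.csv"

# ... while user-generated caches land in the platformdirs-managed directory,
# which filesystem_constants already creates at import time.
cache_fp = DIR_CACHED_DB / "cached_example_inventories.pickle"  # hypothetical name
```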