diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..9e9d4a6a --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,20 @@ +name: Publish docs +on: + workflow_dispatch: + push: + branches: + - develop + +jobs: + mkdocs: + name: Publish docs + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Deploy docs + uses: mhausenblas/mkdocs-deploy-gh-pages@master + env: + REQUIREMENTS: docs/requirements.txt + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 35fa9f53..eae32eab 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -29,15 +29,4 @@ jobs: TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | python setup.py sdist bdist_wheel - twine upload dist/* - - name: Build Sphinx docs - run: | - cd docs - make html - - name: Deploy - if: success() - uses: peaceiris/actions-gh-pages@v3 - with: - publish_branch: gh-pages - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: _build/html/ + twine upload dist/* \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d1e42991..0cb056ce 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,3 +33,10 @@ jobs: - name: Test with pytest run: | pytest -s -m "not skipci" + - name: Test documentation build + uses: Tiryoh/actions-mkdocs@v0 + with: + mkdocs_version: 'latest' + requirements: 'docs/requirements.txt' + configfile: 'mkdocs.yml' + diff --git a/.gitignore b/.gitignore index 99763f63..52120285 100644 --- a/.gitignore +++ b/.gitignore @@ -72,10 +72,8 @@ coverage.xml *.mo *.pot -# Sphinx documentation -docs/_build/ -docs/_generated/* -docs/_autosummary/* +# mkdocs documentation +site/* # mkdocs documentation site/* diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 00000000..0b1d6a3d --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,9 @@ +# includes/excludes all rules by default +default: true + +# 4-space list indentation works best with MkDocs +MD007: + indent: 4 + +# Remove line length limit +MD013: false \ No newline at end of file diff --git a/README.md b/README.md index af7408fa..dee52204 100644 --- a/README.md +++ b/README.md @@ -13,9 +13,8 @@ If you are managing multiple python versions, we suggest using [`virtualenv`](ht The following instructions create and activate a conda environment (recommended) in which you can install: ```bash -conda config --add channels conda-forge -conda create python=3.7 -n -conda activate +conda env create -f environment.yml +conda activate tm2py ``` Basic installation instructions are as follows: @@ -28,9 +27,8 @@ pip install tm2py If you want to install a more up-to-date or development version, you can do so by installing it from the `develop` branch as follows: ```bash -conda config --add channels conda-forge -conda create python=3.7 -n -conda activate +conda env create -f environment.yml +conda activate tm2py pip install git+https://github.com/bayareametro/tm2py@develop ``` @@ -39,17 +37,36 @@ If you are going to be working on Lasso locally, you might want to clone it to y ```bash -conda config --add channels conda-forge -conda create python=3.7 -n -conda activate +conda env create -f environment.yml +conda activate tm2py git clone https://github.com/bayareametro/tm2py cd tm2py pip install -e . ``` +Note that you'll also need to install Emme's python packages into this conda environment. 
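Before moving on to the Emme packages, a quick way to confirm the base `tm2py` install worked is a check like the sketch below. This is only a sketch: it assumes the `tm2py` conda environment from `environment.yml` is active and that the package exposes `__version__` (as the docs configuration in this repo relies on).

```python
# Minimal post-install check -- run inside the activated "tm2py" environment.
# Assumes tm2py exposes __version__ (the docs config reads tm2py.__version__).
import tm2py

print("tm2py version:", tm2py.__version__)
```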
+Follow these instructions from an INRO community forum post: In the Emme Desktop application, open Tools->Application Options->Modeller, change your Python path as desired and click the "Install Modeller Package" button.
+
+If this is successful, the following packages will be visible in your environment when you type `pip list`:
+* inro-dynameq
+* inro-emme
+* inro-emme-agent
+* inro-emme-engine
+* inro-modeller
+
+Note that installing the Emme packages will also install the package *pywin32*; if *pywin32* gets installed by other means (like
+conda or pip), you may get DLL load errors when trying to import the Emme packages, so we recommend uninstalling *pywin32* before
+installing the Emme packages.

## Basic Usage
+Copy and unzip [example_union_test_highway.zip](https://mtcdrive.box.com/s/3entr016e9teq2wt46x1os3fjqylfoge) to a local
+drive and from within that directory run:
+
+```sh
+get_test_data
+tm2py -s scenario.toml -m model.toml
+```

## Contributing

diff --git a/bin/get_test_data b/bin/get_test_data
new file mode 100644
index 00000000..a4b29a13
--- /dev/null
+++ b/bin/get_test_data
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# Copies test data to examples directory or a given directory as an input argument
+# USAGE: get_test_data
+
+TEST_DATA_LOCATION=https://mtcdrive.box.com/s/3entr016e9teq2wt46x1os3fjqylfoge
+TEST_DATA_NAME="UnionCity"
+DEFAULT_DIRECTORY="examples"
+
+echo "Retrieving $TEST_DATA_NAME data from $TEST_DATA_LOCATION"
+
+if [ "$#" -eq 0 ]
+then
+    DIR=$(find . -name $DEFAULT_DIRECTORY)
+    echo "Finding directory:" $DIR
+    cd $DIR
+else
+    OUTDIR=$1
+    [ ! -d $OUTDIR ] && mkdir -p $OUTDIR
+    cd $OUTDIR
+    echo "Moved to provided directory:" $OUTDIR
+fi
+
+echo "Writing to $PWD"
+
+curl $TEST_DATA_LOCATION -L -o test_data.zip
+unzip test_data.zip -d $TEST_DATA_NAME
+rm -f test_data.zip
diff --git a/bin/get_test_data.bat b/bin/get_test_data.bat
new file mode 100644
index 00000000..865a39ec
--- /dev/null
+++ b/bin/get_test_data.bat
@@ -0,0 +1,20 @@
+REM Copies test data to examples directory or a given directory as an input argument
+REM USAGE: get_test_data.bat Optional
+REM ECHO OFF
+
+SET CWD=%cd%
+SET TEST_DATA_LOCATION=https://mtcdrive.box.com/s/3entr016e9teq2wt46x1os3fjqylfoge
+SET TEST_DATA_NAME=UnionCity
+SET DEFAULT_DIRECTORY=examples
+
+ECHO "Retrieving %TEST_DATA_NAME% data from %TEST_DATA_LOCATION%"
+
+SET OUTDIR=%1
+if "%OUTDIR%"=="" SET OUTDIR=%DEFAULT_DIRECTORY%
+if not exist %OUTDIR% mkdir %OUTDIR%
+CD %OUTDIR%
+ECHO "Writing to %CD%"
+
+curl %TEST_DATA_LOCATION% -L -o test_data.zip
+
+CD %CWD%
\ No newline at end of file
diff --git a/bin/tm2py b/bin/tm2py
new file mode 100644
index 00000000..8b9aa39f
--- /dev/null
+++ b/bin/tm2py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+import argparse
+
+from tm2py.controller import RunController
+
+def usage():
+    print("tm2py -s scenario.toml -m model.toml")
+
+def run():
+    parser = argparse.ArgumentParser(description="Main: run MTC TM2PY")
+
+    parser.add_argument(
+        "-s", "--scenario", required=True, help=r"Scenario config file path"
+    )
+    parser.add_argument("-m", "--model", required=True, help=r"Model config file path")
+
+    args = parser.parse_args()
+    controller = RunController([args.scenario, args.model])
+    controller.run()
+
+if __name__ == "__main__":
+    run()
\ No newline at end of file
diff --git a/bin/update_docs b/bin/update_docs
new file mode 100644
index 00000000..f81ce8eb
--- /dev/null
+++ b/bin/update_docs
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+import logging
+import os
+
+import tm2py
+
+# High-level
settings +MODULE_CLASS_DOC_LIST = [ + ("classes_components.md",[("## Components", tm2py.components, 1)]), + ("classes_basic.md", [("## Basic", tm2py, 1)]), + ("classes_config.md", [("## Config", tm2py.config, 1)]), + ("classes_emme.md",[("## Emme", tm2py.emme, 3) ], + ), +] + +# Basic setup +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +base_dir = os.path.dirname(os.path.dirname(__file__)) +docs_dir = os.path.join(base_dir,"docs") +logger.info(f"Using docs directory:\n {docs_dir}") + +# Update class diagrams (currently using defaults) + +logger.info("Updating class diagrams") + +from tm2py.utils import doc_modules + +for _class_diagram_md,_module_list in MODULE_CLASS_DOC_LIST: + class_diagram_str = doc_modules.generate_md_class_diagram(_module_list) + class_diagram_outfile = os.path.join(docs_dir,"includes","class_diagrams",_class_diagram_md) + with open(class_diagram_outfile,'w') as f: + f.write(class_diagram_str) + logger.info(f"Updated class diagrams in:\n{class_diagram_outfile}") \ No newline at end of file diff --git a/configs/version/model_config_v0_0_0.toml b/configs/version/model_config_v0_0_0.toml new file mode 100644 index 00000000..d0f86fad --- /dev/null +++ b/configs/version/model_config_v0_0_0.toml @@ -0,0 +1,450 @@ +#################################### +# MODEL CONFIGURATION # +#################################### + +version = 0.0.0 + +[dir] +skims = "skims" +assignment = "assign" +demand = "demand" + +[model_process] +initialize_components = [ + "prepare_network", + "air_passenger_demand", + "active_mode_skim", + "highway_assignment", + "transit_assignment", +] + +global_iteration_components = [ + "resident_demand", + "internal_external_demand", + "truck_demand", + "average_demand", + "highway_assignment", + "transit_assignment", +] + + +#################################### +# TIME PERIODS # +#################################### + +[[time_periods]] +name = "early am" +short_name = "ea" +duration_hours = 3 +peaking_factor = 1 + +[[time_periods]] +name = "am peak" +short_name = "am" +duration_hours = 4 +peaking_factor = 1.1 + +[[time_periods]] +name = "midday" +short_name = "md" +duration_hours = 5 +peaking_factor = 1 + +[[time_periods]] +name = "pm peak" +short_name = "pm" +duration_hours = 4 +peaking_factor = 1.1 + +[[time_periods]] +name = "evening" +short_name = "ev" +duration_hours = 8 +peaking_factor = 1 + + +[highway] + +[highway.assignment] +type = "SOLA_TRAFFIC_ASSIGNMENT" + +[highway.assignment.stopping_criteria] +relative_gap = 0.0005 +best_relative_gap: 0.0, +max_iterations = 30 +"normalized_gap": 0.0, + +[highway.assignment.background_traffic] +"link_component": "ul1", +"turn_component": None, +"add_transit_vehicles": False, + +# TODO document what this is +tollbooth_start_index = 11 + + +#################################### +# HIGHWAY CLASSES # +#################################### + +[[highway.classes]] +name = "drive alone" +short_name = "da" +code = "d" +excluded_links = [ "is_toll_da", "is_sr2",] +value_of_time = 18.93 +operating_cost = 17.23 +link_price = "@bridgetoll_da" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_da"] + +[[highway.classes]] +name = "shared ride 2" +short_name = "sr2" +code = "e" +excluded_links = [ "is_toll_sr2", "is_sr3",] +value_of_time = 18.93 +operating_cost = 17.23 +link_price = "@bridgetoll_sr2" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr2", "hovdist"] + +[[highway.classes]] +name = "shared ride 3+" +short_name = "sr3" +code = "f" +excluded_links = [ "is_toll_sr3",] +value_of_time = 18.93 
+operating_cost = 17.23 +link_price = "@bridgetoll_sr3" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr3", "hovdist",] + +[[highway.classes]] +name = "truck" +short_name = "trk" +code = "t" +excluded_links = [ "is_toll_trk", "is_sr2",] +value_of_time = 37.87 +operating_cost = 31.28 +link_price = "@bridgetoll_sml" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_vsm", "bridgetoll_sml", "bridgetoll_med",] + +[[highway.classes]] +name = "large truck" +short_name = "lrgtrk" +code = "l" +excluded_links = [ "is_toll_lrgtrk", "is_auto_only",] +value_of_time = 37.87 +operating_cost = 31.28 +link_price = "@bridgetoll_lrg" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_lrg",] + +[[highway.classes]] +short_name = "datoll" +name = "drive alone toll" +code = "D" +excluded_links = [ "is_sr2",] +value_of_time = 18.93 +operating_cost = 17.23 +link_price = "@toll_da" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_da", "valuetoll_da", "tolldist",] + +[[highway.classes]] +name = "shared ride 2 toll" +short_name = "sr2toll" +code = "E" +excluded_links = [ "is_sr3",] +value_of_time = 18.93 +operating_cost = 17.23 +link_price = "@toll_sr2" +price_factor = 0.57 +skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr2", "valuetoll_sr2", "hovdist", "tolldist",] + +[[highway.classes]] +name = "shared ride 3+ toll" +short_name = "sr3toll" +code = "F" +excluded_links = [] +value_of_time = 18.93 +operating_cost = 17.23 +link_price = "@toll_sr3" +price_factor = 0.4 +skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr3", "valuetoll_sr3", "hovdist", "tolldist",] + +[[highway.classes]] +name = "truck toll" +short_name = "trktoll" +code = "T" +excluded_links = [ "is_sr2",] +value_of_time = 37.87 +operating_cost = 31.28 +link_price = "@toll_sml" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_vsm", "bridgetoll_sml", "bridgetoll_med", "valuetoll_vsm", "valuetoll_sml", "valuetoll_med",] + +[[highway.classes]] +name = "large truck toll" +short_name = "lrgtrktoll" +code = "L" +excluded_links = [ "is_auto_only", "is_sr2",] +value_of_time = 37.87 +operating_cost = 31.28 +toll = "@toll_lrg" +skims = [ "time", "dist", "freeflowtime", "bridgetoll_lrg", "valuetoll_lrg",] + + +#################################### +# HIGHWAY DEMAND # +#################################### + + +###### HOUSEHOLD ###### + +[[highway.demand]] +name = "drive alone" +file = ["household","TAZ_Demand_{time_period}.omx"] +matrix = "SOV_GP_{time_period}" +highway_class = "drive alone" +average_occupancy = 1.0 + +[[highway.demand]] +name = "shared ride 2 general purpose lanes" +file = ["household","TAZ_Demand_{time_period}.omx"] +matrix = "SR2_GP_{time_period}" +highway_class = +average_occupancy = 1.75 + +[[highway.demand]] +name = "shared ride 2 HOV lanes" +file = ["household","TAZ_Demand_{time_period}.omx"] +matrix = "SR2_HOV_{time_period}" +highway_class = +average_occupancy = 1.75 + +[[highway.demand]] +name = "shared ride 2 Toll Paying" +file = ["household","TAZ_Demand_{time_period}.omx"] +matrix = "SR2_PAY_{period}" +highway_class = +average_occupancy = 1.75 + +[[highway.demand]] +name = "shared ride 3 general HOV lanes" +file = ["household","TAZ_Demand_{time_period}.omx"] +matrix = "SR3_HOV_{period}" +highway_class = +average_occupancy = 2.5 + +[[highway.demand]] +name = "shared ride 3 general purpose lanes" +file = ["household","TAZ_Demand_{time_period}.omx"] +matrix = "SR3_GP_{period}" +highway_class = +average_occupancy = 2.5 + +[[highway.demand]] +name = "shared ride 3 toll-paying" +file = 
["household","TAZ_Demand_{time_period}.omx"]
+matrix = "SR3_PAY_{period}"
+highway_class =
+average_occupancy = 2.5
+
+###### AIR PAX ######
+
+[[highway.demand]]
+name = "air passenger drive alone"
+file = ["air_passenger","tripsAirPax{period}.omx"]
+matrix = "DA"
+highway_class = "drive alone"
+average_occupancy = 1.0
+
+[[highway.demand]]
+name = "air passenger shared ride 2"
+file = ["air_passenger","tripsAirPax{period}.omx"]
+matrix = "SR2"
+highway_class =
+average_occupancy = 1.75
+
+[[highway.demand]]
+name = "air passenger shared ride 2 toll-paying"
+file = ["air_passenger","tripsAirPax{period}.omx"]
+matrix = "SR2TOLL"
+highway_class =
+average_occupancy = 1.75
+
+[[highway.demand]]
+name = "air passenger shared ride 3+"
+file = ["air_passenger","tripsAirPax{period}.omx"]
+matrix = "SR3"
+highway_class =
+average_occupancy = 2.5
+
+[[highway.demand]]
+name = "air passenger shared ride 3+ toll-paying"
+file = ["air_passenger","tripsAirPax{period}.omx"]
+matrix = "SR3TOLL"
+highway_class =
+average_occupancy = 2.5
+
+
+###### INTERNAL EXTERNAL ######
+
+[[highway.demand]]
+name = "internal external drive alone"
+file = ["internal_external","tripsIx{time_period}.omx"]
+matrix = "DA"
+highway_class = "drive alone"
+average_occupancy = 1.0
+
+[[highway.demand]]
+name = "internal external shared ride 2"
+file = ["internal_external","tripsIx{time_period}.omx"]
+matrix = "SR2"
+highway_class = "shared ride 2"
+average_occupancy =
+
+[[highway.demand]]
+name = "internal external shared ride 2 toll-paying"
+file = ["internal_external","tripsIx{time_period}.omx"]
+matrix = "SR2TOLL"
+highway_class =
+average_occupancy =
+
+[[highway.demand]]
+name = "internal external shared ride 3+"
+file = ["internal_external","tripsIx{time_period}.omx"]
+matrix = "SR3"
+highway_class = "shared ride 3+"
+average_occupancy =
+
+[[highway.demand]]
+name = "internal external shared ride 3+ toll-paying"
+file = ["internal_external","tripsIx{time_period}.omx"]
+matrix = "SR3TOLL"
+highway_class =
+average_occupancy =
+
+###### COMMERCIAL VEHICLES ######
+
+[[highway.demand]]
+name = "commercial vehicles"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "CTRUCK"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "commercial vehicles toll-paying"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "CTRUCKTOLL"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "very small trucks"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "VSTRUCK"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "very small trucks toll-paying"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "VSTRUCKTOLL"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "small trucks"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "STRUCK"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "small trucks toll-paying"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "STRUCKTOLL"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "medium trucks"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "MTRUCK"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+[[highway.demand]]
+name = "medium trucks toll-paying"
+file = ["commercial","tripstrk{time_period}.omx"]
+matrix = "MTRUCKTOLL"
+highway_class =
+average_occupancy =
+pce = 2.0
+
+
+###############################
+# Transit #
+############################### +[transit] + + +skim_name = "{access_mode}_TRN_{transit_set}_{time_period}_{iteration}" +skim_files = "transit_skim_{skim_name}.omx" + +assignment_name = "TRN_{set}_{period}" +assignment_files = "transit_assign_{assignment_name}.omx" + +access_modes = ["WLK", "PNR", "KNRTNC", "KNRPRV"] + +[[transit_set]] +name = "BUS" + +[[transit_set]] +name = "PREM" + +[[transit_set]] +name = "ALLPEN" + +[[transit_mode]] +code = "b" +type = "local" + +[[transit_mode]] +code = "x" +type = "premium" + +[[transit_mode]] +code = "f" +type = "premium" + +[[transit_mode]] +code = "l" +type = "premium" + +[[transit_mode]] +code = "h" +type = "premium" + +[[transit_mode]] +code = "r" +type = "premium" + +[[aux_mode]] +code = "w" +type = "walk" + +[[aux_mode]] +code = "a" +type = "walk" + +[[aux_mode]] +code = "e" +type = "walk" + + diff --git a/dev-requirements.txt b/dev-requirements.txt index f840628d..102092a2 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -2,7 +2,4 @@ black flake8 pre-commit pytest -recommonmark -sphinx -sphinx-autodoc-typehints -sphinx_rtd_theme +recommonmark \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index d4bb2cbb..00000000 --- a/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 6706a0ea..00000000 --- a/docs/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# Documentation - -Documentation is developed using the Python package [Sphinx](https://www.sphinx-doc.org/). - -## Installing - -1. Along with all over development tools ( recommended ) -```sh -pip install -r dev-requirements.txt -``` - -2. Using pip -```sh -pip install sphinx sphinx-autodoc-typehints sphinx_rtd_theme -``` - -2. Using conda -```sh -conda install sphinx sphinx-autodoc-typehints sphinx_rtd_theme -``` - -## Developing - -... - -## Building - -Sphinx documentation webpages can be built using the following shell command from the `docs` folder: -```sh -make html -``` - -## Deploying - -Documentation is built and deployed to {EDITME} upon the master branch successfully passing continuous integration tests. diff --git a/docs/_templates/my-class-template.rst b/docs/_templates/my-class-template.rst deleted file mode 100644 index 5a91b231..00000000 --- a/docs/_templates/my-class-template.rst +++ /dev/null @@ -1,33 +0,0 @@ -{{ fullname | escape | underline}} - -.. currentmodule:: {{ module }} - -.. autoclass:: {{ objname }} - :members: - :show-inheritance: - :inherited-members: - - {% block methods %} - .. automethod:: __init__ - - {% if methods %} - .. rubric:: {{ _('Methods') }} - - .. autosummary:: - {% for item in methods %} - ~{{ name }}.{{ item }} - {%- endfor %} - {% endif %} - {% endblock %} - - {% block attributes %} - {% if attributes %} - .. rubric:: {{ _('Attributes') }} - - .. 
autosummary:: - {% for item in attributes %} - ~{{ name }}.{{ item }} - {%- endfor %} - {% endif %} - {% endblock %} - \ No newline at end of file diff --git a/docs/_templates/my-module-template.rst b/docs/_templates/my-module-template.rst deleted file mode 100644 index c552fc33..00000000 --- a/docs/_templates/my-module-template.rst +++ /dev/null @@ -1,67 +0,0 @@ -{{ fullname | escape | underline}} - -{% block modules %} -{% if modules %} -.. rubric:: Modules - -.. autosummary:: - :toctree: - :template: my-module-template.rst - :recursive: -{% for item in modules %} - {{ item }} -{%- endfor %} -{% endif %} -{% endblock %} - -.. automodule:: {{ fullname }} - - {% block attributes %} - {% if attributes %} - .. rubric:: Module Attributes - - .. autosummary:: - :toctree: - {% for item in attributes %} - {{ item }} - {%- endfor %} - {% endif %} - {% endblock %} - - {% block functions %} - {% if functions %} - .. rubric:: {{ _('Functions') }} - - .. autosummary:: - :toctree: - {% for item in functions %} - {{ item }} - {%- endfor %} - {% endif %} - {% endblock %} - - {% block classes %} - {% if classes %} - .. rubric:: {{ _('Classes') }} - - .. autosummary:: - :toctree: - :template: my-class-template.rst - {% for item in classes %} - {{ item }} - {%- endfor %} - {% endif %} - {% endblock %} - - {% block exceptions %} - {% if exceptions %} - .. rubric:: {{ _('Exceptions') }} - - .. autosummary:: - :toctree: - {% for item in exceptions %} - {{ item }} - {%- endfor %} - {% endif %} - {% endblock %} - diff --git a/docs/api.md b/docs/api.md new file mode 100644 index 00000000..336309b4 --- /dev/null +++ b/docs/api.md @@ -0,0 +1,29 @@ +# API Documentation + +## Controller + +::: tm2py.controller + +## Components + +### Base Component + +::: tm2py.components.component + +### Demand Components + +::: tm2py.components.demand + +### Network Components + +::: tm2py.components.network + +## Emme Wrappers + +::: tm2py.emme + +## Errata + +::: tm2py.logger +::: tm2py.tools +::: tm2py.examples diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 00000000..e5e733d9 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,225 @@ +# Architecture + +## Abstract Component +``` mermaid +classDiagram + class Component{ + +_controller + +_trace + +validate_inputs() + +run() + +report_progress() + +test_component() + +write_top_sheet() + } +``` +## Controllers +``` mermaid +classDiagram + Controller <|-- ModelController + class Controller{ + _config + +_logger + +_top_sheet + +_trace + +validate_inputs() + +run() + +report_progress() + +test_component() + +write_top_sheet() + } + class ModelController{ + +_components: String model.Component + +_iteration + +validate_inputs() + +run() + +report_progress() + +test_component() + +write_top_sheet() + +run_prepare_emme_networks() + +run_non_motorized_skims() + +run_airpassenger_model() + +run_resident_model() + +run_internal_external_model() + +run_truck_model() + +run_average_demand() + +run_highway_assignment() + +run_transit_assignment() + } + class Logger{ + +controller + +log() + } +``` +## Configs + +```mermaid +classDiagram + class Configuration{ + +load() + +save() + } +``` + +## Utils + +```mermaid +classDiagram + class NetworkCalc{ + +_scenario + +_network_calc + +__call__() + +_add_calc() + +run() + } + class OMX{ + +_file_path + +_mode + +_scenario + +_omx_key + +_omx_file + +_matrix_cache + +_generate_name() + +open() + +close() + +__enter__() + +__exit__() + +write_matrices() + +write_clipped_array() + +write_array() + +read() + 
+read_hdf5() + } + class EmmeProjectCache{ + +close_all() + +create_project() + +project() + } + class MatrixCache{ + +_scenario + +_emmebanks + +_timestamps + +_data + +get_data() + +set_data() + +clear() + } + +``` +## Demand + +``` mermaid + +classDiagram + Component -- AirPassenger: how? + Component -- InternalExternal: how? + Component -- Truck: how? + Component <|-- ResidentsModel + ResidentsModel -- InternalExternal: how? + ResidentsModel -- AirPassenger: how? + class Component{ + } + class AirPassenger{ + +_parameter + +_load_demand() + +_sum_demand() + +_interpolate() + +_export_result() + } + class InternalExternal{ + +_parameter + +_ix_forecast() + +_ix_time_of_day() + +_ix_toll_choice() + +_export_results() + } + class ResidentsModel{ + +_start_household_manager() + +_start_matrix_manager() + +_run_resident_model() + +_stop_java() + } + class Truck{ + +_parameter + +_generation() + +_distribution() + +_time_of_day() + +_toll_choice() + +_export_results() + } +``` + +## Assignment + +``` mermaid + +classDiagram + Component <|-- HighwayAssignment + Component <|-- AssignMAZSPDemand + Component <|-- ActiveModesAssignment + ActiveModesAssignment -- TransitAssignment: how? + HighwayAssignment -- AssignMAZSPDemand: how? + ImportDemand -- HighwayAssignment: how? + ActiveModesAssignment -- AssignMAZSPDemand: how? + class Component{ + } + class HighwayAssignment{ + +_num_processors + +_root_dir + +_matrix_cache + +_emme_manager + +_Emmebank + +_skim_matrices + +_setup() + +_assign_and_skim() + +_calc_time_skim() + +_set_intrazonal_values() + +_export_skims() + +_base_spec() + +_prepare_traffic_class() + +_prepare_path_analyses() + } + class ImportDemand{ + +_root_dir + +_scenario + +_period + +_setup() + +_read_demand() + } + class AssignMAZSPDemand{ + +_scenario + +_period + +_modes + +_modeller + +_bin_edges + +_net_calc + +_debug_report + +_debug + +_mazs + +_demand + +_max_dist + +_network + +_root_index + +_leaf_index + +_setup() + +_prepare_network() + +_get_county_mazs() + +_process_demand() + +_group_demand() + +_find_roots_and_leaves() + +_run_shortest_path() + +_assign_flow() + } + class ActiveModesAssignment{ + +_scenario + +_modeller + +_setup() + +_prepare_network() + +_run_shortest_path() + } + class TransitAssignment{ + +_root_dir + +_emme_manager + +_setup() + } +``` diff --git a/docs/autodoc.rst b/docs/autodoc.rst deleted file mode 100644 index 9eab5881..00000000 --- a/docs/autodoc.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. autosummary:: - :toctree: _autosummary - :template: my-module-template.rst - :recursive: - - tm2py \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 9eb89747..00000000 --- a/docs/conf.py +++ /dev/null @@ -1,90 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - -# Get the project root dir, which is the parent dir of this -cwd = os.getcwd() -project_root = os.path.dirname(cwd) - -# Insert the project root dir as the first element in the PYTHONPATH. 
-# This lets us ensure that the source package is imported, and that its -# version is used. -sys.path.insert(0, project_root) - -import tm2py - -# -- Project information ----------------------------------------------------- - -project = 'tm2py' -copyright = '2021, Bay Area Metro' -authors = 'TBD' -version = tm2py.__version__ - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.doctest", - "sphinx_autodoc_typehints", - "sphinx.ext.ifconfig", - "sphinx.ext.inheritance_diagram", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.coverage", - "sphinx.ext.autosummary", - "sphinx.ext.viewcode", - "sphinx.ext.napoleon", - "recommonmark", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = "sphinx_rtd_theme" - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -intersphinx_mapping = { -} - -autodoc_default_options = { - "members": True, - "undoc-members": True, - "inherited-members": True, - "imported-members": True, - "show-inheritance": True, - "member-order": "groupwise", -} - -autoclass_content = "class" -# classes should include both the class' and the __init__ method's docstring - -autosummary_generate = True diff --git a/docs/contributing/documentation.md b/docs/contributing/documentation.md new file mode 100644 index 00000000..d8468ddc --- /dev/null +++ b/docs/contributing/documentation.md @@ -0,0 +1,21 @@ +# Documentation + +Documentation is developed using the Python package [mkdocs](https://www.mkdocs.org/). + +## Installing + +Using pip: +```sh +pip install -r docs/requirements.txt +``` +## Building Locally + +Mkdocs documentation webpages can be built using the following shell command from the `docs` folder: +```sh +mkdocs build +mkdocs serve +``` + +## Deploying documentation + +Documentation is built and deployed to [http://bayareametro.github.io/tm2py] upon the `develop` branch successfully passing continuous integration tests (to be updated to `master` when released) as specified in `.github/workflows/docs.yml`. diff --git a/docs/css/additional.css b/docs/css/additional.css new file mode 100644 index 00000000..e69de29b diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..e69de29b diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index fcc7575e..00000000 --- a/docs/index.rst +++ /dev/null @@ -1,27 +0,0 @@ -.. tm2py documentation master file, created by - sphinx-quickstart on Mon Apr 19 14:20:11 2021. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to tm2py's documentation! 
-======================================== - -This package ....ADDME - -It aims to have the following functionality: -1. ADDME - -.. toctree:: - :maxdepth: 1 - :caption: Contents: - - starting - API reference <_autosummary/tm2py> - - -Indices and tables -=================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 2119f510..00000000 --- a/docs/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..3c8d7e78 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1 @@ +-r ../requirements.txt diff --git a/docs/starting.md b/docs/starting.md index afa95b4c..01028056 100644 --- a/docs/starting.md +++ b/docs/starting.md @@ -7,9 +7,8 @@ If you are managing multiple python versions, we suggest using [`virtualenv`](ht The following instructions create and activate a conda environment (recommended) in which you can install: ```bash -conda config --add channels conda-forge -conda create python=3.7 -n -conda activate +conda env create -f environment.yml +conda activate tm2py ``` Basic installation instructions are as follows: @@ -22,9 +21,8 @@ pip install tm2py If you want to install a more up-to-date or development version, you can do so by installing it from the `develop` branch as follows: ```bash -conda config --add channels conda-forge -conda create python=3.7 -n -conda activate +conda env create -f environment.yml +conda activate tm2py pip install git+https://github.com/bayareametro/tm2py@develop ``` @@ -33,9 +31,8 @@ If you are going to be working on Lasso locally, you might want to clone it to y ```bash -conda config --add channels conda-forge -conda create python=3.7 -n -conda activate +conda env create -f environment.yml +conda activate tm2py git clone https://github.com/bayareametro/tm2py cd tm2py pip install -e . 
@@ -69,6 +66,6 @@ To learn basic lasso functionality, please refer to the following jupyter notebo Jupyter notebooks can be started by activating the lasso conda environment and typing `jupyter notebook`: ```bash - conda activate + conda activate tm2py jupyter notebook ``` diff --git a/environment.yml b/environment.yml new file mode 100644 index 00000000..c3545a0e --- /dev/null +++ b/environment.yml @@ -0,0 +1,302 @@ +name: tm2py +channels: + - conda-forge + - defaults +dependencies: + - affine=2.3.0=py_0 + - appnope=0.1.2=py37hf985489_2 + - argon2-cffi=21.3.0=pyhd8ed1ab_0 + - argon2-cffi-bindings=21.2.0=py37h271585c_1 + - attrs=21.4.0=pyhd8ed1ab_0 + - backcall=0.2.0=pyh9f0ad1d_0 + - backports=1.0=py_2 + - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0 + - bleach=4.1.0=pyhd8ed1ab_0 + - boost-cpp=1.74.0=hff03dee_4 + - branca=0.4.2=pyhd8ed1ab_0 + - brotli=1.0.9=h0d85af4_6 + - brotli-bin=1.0.9=h0d85af4_6 + - brotlipy=0.7.0=py37h271585c_1003 + - bzip2=1.0.8=h0d85af4_4 + - c-ares=1.18.1=h0d85af4_0 + - ca-certificates=2021.10.8=h033912b_0 + - cairo=1.16.0=he43a7df_1008 + - certifi=2021.10.8=py37hf985489_1 + - cffi=1.15.0=py37h446072c_0 + - cfitsio=3.470=h01dc385_7 + - chardet=4.0.0=py37hf985489_2 + - charset-normalizer=2.0.0=pyhd8ed1ab_0 + - click-plugins=1.1.1=py_0 + - cligj=0.7.2=pyhd8ed1ab_1 + - cryptography=36.0.1=py37h5e77fcc_0 + - curl=7.82.0=h9f20792_0 + - cycler=0.11.0=pyhd8ed1ab_0 + - dbus=1.13.6=h811a1a6_3 + - debugpy=1.5.1=py37hd8d24ac_0 + - decorator=5.1.1=pyhd8ed1ab_0 + - defusedxml=0.7.1=pyhd8ed1ab_0 + - entrypoints=0.4=pyhd8ed1ab_0 + - expat=2.4.7=h96cf925_0 + - fiona=1.8.20=py37h687fd47_1 + - flit-core=3.7.1=pyhd8ed1ab_0 + - folium=0.12.0=pyhd8ed1ab_1 + - fontconfig=2.13.96=h676cef8_1 + - fonttools=4.30.0=py37h69ee0a8_0 + - freetype=2.10.4=h4cff582_1 + - freexl=1.0.6=h0d85af4_0 + - gdal=3.3.1=py37hd6d8e87_3 + - geographiclib=1.52=pyhd8ed1ab_0 + - geojson=2.5.0=py_0 + - geopandas=0.9.0=pyhd8ed1ab_1 + - geopandas-base=0.9.0=pyhd8ed1ab_1 + - geos=3.9.1=he49afe7_2 + - geotiff=1.6.0=h26421ea_6 + - gettext=0.19.8.1=hd1a6beb_1008 + - giflib=5.2.1=hbcb3906_2 + - hdf4=4.2.15=hefd3b78_3 + - hdf5=1.12.1=nompi_ha60fbc9_104 + - icu=68.2=he49afe7_0 + - idna=3.3=pyhd8ed1ab_0 + - ipykernel=6.9.1=py37h4c52d7d_0 + - ipython=7.32.0=py37hf985489_0 + - ipython_genutils=0.2.0=py_1 + - ipywidgets=7.6.5=pyhd8ed1ab_0 + - jbig=2.1=h0d85af4_2003 + - jedi=0.18.1=py37hf985489_0 + - jinja2=3.0.3=pyhd8ed1ab_0 + - joblib=1.1.0=pyhd8ed1ab_0 + - jpeg=9e=h0d85af4_0 + - json-c=0.15=hcb556a6_0 + - jsonschema=3.2.0=pyhd8ed1ab_3 + - jupyter=1.0.0=py37hf985489_7 + - jupyter_client=7.1.2=pyhd8ed1ab_0 + - jupyter_console=6.4.3=pyhd8ed1ab_0 + - jupyter_contrib_core=0.3.3=py_2 + - jupyter_contrib_nbextensions=0.5.1=pyhd8ed1ab_2 + - jupyter_core=4.9.2=py37hf985489_0 + - jupyter_highlight_selected_word=0.2.0=py37hf985489_1005 + - jupyter_latex_envs=1.4.6=pyhd8ed1ab_1002 + - jupyter_nbextensions_configurator=0.4.1=py37hf985489_2 + - jupyterlab_pygments=0.1.2=pyh9f0ad1d_0 + - jupyterlab_widgets=1.0.0=pyhd8ed1ab_1 + - kealib=1.4.14=ha22a8b1_3 + - kiwisolver=1.3.2=py37h737db71_1 + - krb5=1.19.2=hb49756b_4 + - lark-parser=0.11.3=pyhd8ed1ab_0 + - lcms2=2.12=h577c468_0 + - lerc=3.0=he49afe7_0 + - libblas=3.9.0=13_osx64_openblas + - libbrotlicommon=1.0.9=h0d85af4_6 + - libbrotlidec=1.0.9=h0d85af4_6 + - libbrotlienc=1.0.9=h0d85af4_6 + - libcblas=3.9.0=13_osx64_openblas + - libclang=11.1.0=default_he082bbe_1 + - libcurl=7.82.0=h9f20792_0 + - libcxx=12.0.1=habf9029_1 + - libdap4=3.20.6=h3e144a0_2 + - libdeflate=1.10=h0d85af4_0 + - 
libedit=3.1.20191231=h0678c8f_2 + - libev=4.33=haf1e3a3_1 + - libffi=3.4.2=h0d85af4_5 + - libgdal=3.3.1=h754b685_3 + - libgfortran=5.0.0=9_3_0_h6c81a4c_23 + - libgfortran5=9.3.0=h6c81a4c_23 + - libglib=2.70.2=hf1fb8c0_4 + - libiconv=1.16=haf1e3a3_0 + - libkml=1.3.0=h8fd9edb_1014 + - liblapack=3.9.0=13_osx64_openblas + - libllvm11=11.1.0=hd011deb_3 + - libnetcdf=4.8.1=nompi_h6609ca0_101 + - libnghttp2=1.47.0=h942079c_0 + - libopenblas=0.3.18=openmp_h3351f45_0 + - libpng=1.6.37=h7cec526_2 + - libpq=13.5=hea3049e_1 + - librttopo=1.1.0=h5413771_6 + - libsodium=1.0.18=hbcb3906_1 + - libspatialindex=1.9.3=he49afe7_4 + - libspatialite=5.0.1=h035f608_6 + - libssh2=1.10.0=h52ee1ee_2 + - libtiff=4.3.0=h17f2ce3_3 + - libwebp=1.2.2=h28dabe5_0 + - libwebp-base=1.2.2=h0d85af4_1 + - libxcb=1.13=h0d85af4_1004 + - libxml2=2.9.12=h93ec3fd_0 + - libxslt=1.1.33=h5739fc3_2 + - libzip=1.8.0=h8b0c345_1 + - libzlib=1.2.11=h9173be1_1013 + - llvm-openmp=13.0.1=hcb1a161_1 + - lxml=4.8.0=py37h250624f_0 + - lz4-c=1.9.3=he49afe7_1 + - mapclassify=2.4.3=pyhd8ed1ab_0 + - markupsafe=2.1.0=py37h69ee0a8_1 + - matplotlib-base=3.5.1=py37h3147e9e_0 + - matplotlib-inline=0.1.3=pyhd8ed1ab_0 + - mistune=0.8.4=py37h271585c_1005 + - munch=2.5.0=py_0 + - munkres=1.1.4=pyh9f0ad1d_0 + - mysql-common=8.0.28=h694c41f_0 + - mysql-libs=8.0.28=h115446f_0 + - nbclient=0.5.12=pyhd8ed1ab_0 + - nbconvert=6.4.2=py37hf985489_0 + - nbformat=5.1.3=pyhd8ed1ab_0 + - ncurses=6.3=he49afe7_0 + - nest-asyncio=1.5.4=pyhd8ed1ab_0 + - notebook=6.4.0=pyha770c72_0 + - nspr=4.32=hcd9eead_1 + - nss=3.74=h31e2bf1_0 + - numpy=1.21.1=py37h84c02c4_0 + - openjpeg=2.4.0=h6e7aa92_1 + - openssl=1.1.1n=h6c3fc93_0 + - osmnx=1.1.1=pyhd8ed1ab_0 + - packaging=21.3=pyhd8ed1ab_0 + - pandas=1.3.1=py37hb23ed4d_0 + - pandoc=2.17.1.1=h694c41f_0 + - pandocfilters=1.5.0=pyhd8ed1ab_0 + - parso=0.8.3=pyhd8ed1ab_0 + - pcre=8.45=he49afe7_0 + - pexpect=4.8.0=pyh9f0ad1d_2 + - pickleshare=0.7.5=py_1003 + - pillow=9.0.1=py37h2540ef4_2 + - pip=22.0.4=pyhd8ed1ab_0 + - pixman=0.40.0=hbcb3906_0 + - poppler=21.03.0=h640f9a4_0 + - poppler-data=0.4.11=hd8ed1ab_0 + - postgresql=13.5=he8fe76e_1 + - proj=8.0.1=h1512c50_0 + - prometheus_client=0.13.1=pyhd8ed1ab_0 + - prompt-toolkit=3.0.27=pyha770c72_0 + - prompt_toolkit=3.0.27=hd8ed1ab_0 + - pthread-stubs=0.4=hc929b4f_1001 + - ptyprocess=0.7.0=pyhd3deb0d_0 + - pycparser=2.21=pyhd8ed1ab_0 + - pydantic=1.9.0=py37h271585c_0 + - pygments=2.11.2=pyhd8ed1ab_0 + - pyopenssl=22.0.0=pyhd8ed1ab_0 + - pyparsing=3.0.7=pyhd8ed1ab_0 + - pyproj=3.1.0=py37h7b8e8f7_4 + - pyqt=5.12.3=py37hf985489_8 + - pyqt-impl=5.12.3=py37hab5ec1f_8 + - pyqt5-sip=4.19.18=py37h070e122_8 + - pyqtchart=5.12=py37hab5ec1f_8 + - pyqtwebengine=5.12.1=py37hab5ec1f_8 + - pyrsistent=0.18.1=py37h271585c_0 + - pysocks=1.7.1=py37hf985489_4 + - python=3.7.10=haf480d7_104_cpython + - python-dateutil=2.8.2=pyhd8ed1ab_0 + - python_abi=3.7=2_cp37m + - pytz=2021.1=pyhd8ed1ab_0 + - pyyaml=5.4.1=py37h271585c_1 + - pyzmq=22.3.0=py37h8f778e5_1 + - qt=5.12.9=h126340a_4 + - qtconsole=5.2.2=pyhd8ed1ab_1 + - qtconsole-base=5.2.2=pyhd8ed1ab_1 + - qtpy=2.0.1=pyhd8ed1ab_0 + - rasterio=1.2.8=py37h34762e7_0 + - readline=8.1=h05e3726_0 + - requests=2.26.0=pyhd8ed1ab_0 + - rtree=0.9.7=py37hf13911c_3 + - scikit-learn=1.0.2=py37h572704e_0 + - scipy=1.7.3=py37h4e3cf02_0 + - send2trash=1.8.0=pyhd8ed1ab_0 + - setuptools=59.8.0=py37hf985489_0 + - shapely=1.7.1=py37hf313787_5 + - six=1.16.0=pyh6c4a22f_0 + - snuggs=1.4.7=py_0 + - sqlite=3.37.0=h23a322b_0 + - terminado=0.13.3=py37hf985489_0 + - testpath=0.6.0=pyhd8ed1ab_0 + 
- threadpoolctl=3.1.0=pyh8a188c0_0 + - tiledb=2.3.4=h8370e7a_0 + - tk=8.6.12=h5dbffcc_0 + - tornado=6.1=py37h271585c_2 + - traitlets=5.1.1=pyhd8ed1ab_0 + - typing-extensions=4.1.1=hd8ed1ab_0 + - typing_extensions=4.1.1=pyha770c72_0 + - tzcode=2021e=h0d85af4_0 + - tzdata=2021e=he74cb21_0 + - unicodedata2=14.0.0=py37h271585c_0 + - urllib3=1.26.8=pyhd8ed1ab_1 + - wcwidth=0.2.5=pyh9f0ad1d_2 + - webencodings=0.5.1=py_1 + - wheel=0.37.1=pyhd8ed1ab_0 + - widgetsnbextension=3.5.2=py37hf985489_1 + - xerces-c=3.2.3=h379762d_3 + - xorg-libxau=1.0.9=h35c211d_0 + - xorg-libxdmcp=1.1.3=h35c211d_0 + - xz=5.2.5=haf1e3a3_1 + - yaml=0.2.5=h0d85af4_2 + - zeromq=4.3.4=he49afe7_1 + - zipp=3.7.0=pyhd8ed1ab_1 + - zlib=1.2.11=h9173be1_1013 + - zstd=1.5.2=h582d3a0_0 + - pip: + - alabaster==0.7.12 + - astunparse==1.6.3 + - babel==2.9.1 + - beautifulsoup4==4.10.0 + - black==22.1.0 + - bracex==2.2.1 + - cached-property==1.5.2 + - cfgv==3.3.1 + - click==8.0.4 + - commonmark==0.9.1 + - distlib==0.3.4 + - docutils==0.17.1 + - editorconfig==0.12.3 + - filelock==3.6.0 + - flake8==4.0.1 + - fontawesome-markdown==0.2.6 + - ghp-import==2.0.2 + - identify==2.4.11 + - imagesize==1.3.0 + - importlib-metadata==4.2.0 + - iniconfig==1.1.1 + - jsbeautifier==1.14.0 + - markdown==3.3.4 + - mccabe==0.6.1 + - mergedeep==1.3.4 + - mkdocs==1.2.3 + - mkdocs-autorefs==0.4.1 + - mkdocs-awesome-pages-plugin==2.7.0 + - mkdocs-include-markdown-plugin==3.3.0 + - mkdocs-macros-plugin==0.6.4 + - mkdocs-material==8.2.5 + - mkdocs-material-extensions==1.0.3 + - mkdocs-mermaid2-plugin==0.5.2 + - mkdocstrings==0.18.1 + - mkdocstrings-python-legacy==0.2.2 + - mypy-extensions==0.4.3 + - networkx==2.6.3 + - nodeenv==1.6.0 + - numexpr==2.8.1 + - openmatrix==0.3.5.0 + - pathspec==0.9.0 + - platformdirs==2.5.1 + - pluggy==1.0.0 + - pre-commit==2.17.0 + - py==1.11.0 + - pycodestyle==2.8.0 + - pyflakes==2.4.0 + - pymdown-extensions==9.2 + - pytest==7.0.1 + - pytkdocs==0.16.1 + - pyyaml-env-tag==0.1 + - recommonmark==0.7.1 + - snowballstemmer==2.2.0 + - soupsieve==2.3.1 + - sphinx==4.3.2 + - sphinxcontrib-applehelp==1.0.2 + - sphinxcontrib-devhelp==1.0.2 + - sphinxcontrib-htmlhelp==2.0.0 + - sphinxcontrib-jsmath==1.0.1 + - sphinxcontrib-qthelp==1.0.3 + - sphinxcontrib-serializinghtml==1.1.5 + - tables==3.7.0 + - termcolor==1.1.0 + - toml==0.10.2 + - tomli==2.0.1 + - typed-ast==1.5.2 + - virtualenv==20.13.3 + - watchdog==2.1.6 + - wcmatch==8.3 +prefix: /Users/elizabeth/opt/miniconda3/envs/tm2py diff --git a/examples/README.md b/examples/README.md index f384b492..282cc8c4 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,3 +1,9 @@ # Examples This repository comes with the following example data... 
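For orientation, the `tm2py` command added in `bin/tm2py` runs the model from a scenario config and a model config (such as the example `model_config.toml` below). A minimal programmatic equivalent is sketched here; the `scenario.toml` and `model.toml` file names are taken from the Basic Usage example and assume you are running from the unzipped example directory.

```python
# Programmatic equivalent of: tm2py -s scenario.toml -m model.toml
# (mirrors bin/tm2py; file names follow the Basic Usage example)
from tm2py.controller import RunController

controller = RunController(["scenario.toml", "model.toml"])
controller.run()
```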
+ +## Get Example Data + +```sh +get_test_data +``` diff --git a/examples/model_config.toml b/examples/model_config.toml new file mode 100644 index 00000000..19c1d3c7 --- /dev/null +++ b/examples/model_config.toml @@ -0,0 +1,1279 @@ +#################################### +# MODEL CONFIGURATION # +#################################### + +[[time_periods]] + name = "ea" + length_hours = 3 + highway_capacity_factor = 3 + emme_scenario_id = 11 +[[time_periods]] + name = "am" + length_hours = 4 + highway_capacity_factor = 3.65 + emme_scenario_id = 12 +[[time_periods]] + name = "md" + length_hours = 5 + highway_capacity_factor = 5 + emme_scenario_id = 13 +[[time_periods]] + name = "pm" + length_hours = 4 + highway_capacity_factor = 3.65 + emme_scenario_id = 14 +[[time_periods]] + name = "ev" + length_hours = 8 + highway_capacity_factor = 8 + emme_scenario_id = 15 + +[household] + highway_demand_file = "demand_matrices\\highway\\household\\TAZ_Demand_{period}.omx" + transit_demand_file = "demand_matrices\\transit\\TAP_Demand_{set}_{period}.omx" + +[air_passenger] + highway_demand_file = "demand_matrices\\highway\\air_passenger\\tripsAirPax{period}.omx" + input_demand_folder = "inputs\\nonres" + reference_start_year = "2007" + reference_end_year = "2035" + [[air_passenger.demand_aggregation]] + result_class_name = "da" + src_group_name = "DA" + access_modes = [ "ES", "PK", "RN", "TX", "LI",] + [[air_passenger.demand_aggregation]] + result_class_name = "sr2" + src_group_name = "S2" + access_modes = [ "ES", "PK", "RN", "TX", "LI",] + [[air_passenger.demand_aggregation]] + result_class_name = "sr3" + src_group_name = "S3" + access_modes = [ "ES", "PK", "RN", "TX", "LI", "VN", "HT", "CH",] + +[internal_external] + highway_demand_file = "demand_matrices\\highway\\internal_external\\tripsIx{period}.omx" + input_demand_file = "inputs\\nonres\\IXDaily2006x4.may2208.new.omx" + reference_year = 2005 + toll_choice_time_coefficient = -0.088 + value_of_time = 18.93 + shared_ride_2_toll_factor = 0.5714285714285714 + shared_ride_3_toll_factor = 0.4 + operating_cost_per_mile = 17.23 + +[truck] + highway_demand_file = "demand_matrices\\highway\\commercial\\tripstrk{period}.omx" + k_factors_file = "inputs\\nonres\\truck_kfactors_taz.csv" + friction_factors_file = "inputs\\nonres\\truckFF.dat" + value_of_time = 37.87 + operating_cost_per_mile = 31.28 + toll_choice_time_coefficient = -0.088 + max_balance_iterations = 999 + max_balance_relative_error = 0.0001 + +[active_modes] + emme_scenario_id = 1 + [[active_modes.shortest_path_skims]] + mode = "walk" + roots = "MAZ" + leaves = "MAZ" + max_dist_miles = 3 + output = "skim_matrices\\non_motorized\\ped_distance_maz_maz.txt" + [[active_modes.shortest_path_skims]] + mode = "walk" + roots = "MAZ" + leaves = "TAP" + max_dist_miles = 0.5 + output = "skim_matrices\\non_motorized\\ped_distance_maz_tap.txt" + [[active_modes.shortest_path_skims]] + mode = "bike" + roots = "MAZ" + leaves = "MAZ" + max_dist_miles = 3 + output = "skim_matrices\\non_motorized\\bike_distance_maz_maz.txt" + [[active_modes.shortest_path_skims]] + mode = "bike" + roots = "MAZ" + leaves = "TAP" + max_dist_miles = 3 + output = "skim_matrices\\non_motorized\\bike_distance_maz_tap.txt" + [[active_modes.shortest_path_skims]] + mode = "bike" + roots = "TAZ" + leaves = "TAZ" + output = "skim_matrices\\non_motorized\\bike_distance_taz_taz.txt" + [[active_modes.shortest_path_skims]] + mode = "walk" + roots = "TAP" + leaves = "TAP" + max_dist_miles = 0.5 + output = 
"skim_matrices\\non_motorized\\ped_distance_tap_tap.txt" + +[highway] + output_skim_path = "skim_matrices\\highway\\HWYSKM{period}_taz.omx" + relative_gap = 0.0005 + max_iterations = 100 + # labels entire highway network (any of the classes) + MAZ connectors + generic_highway_mode_code = "c" + # include other MAZs to estimate density (pop+jobs*2.5)/acres for each MAZ + area_type_buffer_dist_miles = 0.5 + [highway.tolls] + file_path = "inputs\\hwy\\tolls.csv" + src_vehicle_group_names = ["da", "s2", "s3", "vsm", "sml", "med", "lrg"] + # the dst_vehicle_group_names is used in the class group suffix for the + # highway.classes toll attribute name and the skims name, "bridgetoll_{}" + # and "valuetoll_{}" + dst_vehicle_group_names = ["da", "sr2", "sr3", "vsm", "sml", "med", "lrg"] + # tollbooth separates links with "bridge" tolls (index < this value) + # (used in all classes) vs. "value" tolls (used in toll-available classes only) + tollbooth_start_index = 11 + [highway.maz_to_maz] + mode_code = "x" + excluded_links = [ "is_toll_da", "is_sr",] + operating_cost_per_mile = 18.93 + value_of_time = 17.23 + output_skim_file = "skim_matrices\\highway\\HWYSKIM_MAZMAZ_DA.csv" + skim_period = "md" + max_skim_cost = 11.0 + # based on ~= 5 miles @ 40 mph = 11 + # = time + (0.6 / vot) * (dist * opcost) + # = 5 / 40 * 60 + (0.6 / 17.23) * (5 * 18.93) + demand_file = "demand_matrices\\highway\\maz_demand\\auto_{period}_MAZ_AUTO_{number}_{period}.omx" + [[highway.maz_to_maz.demand_county_groups]] + number = 1 + counties = ["San Francisco", "San Mateo", "Santa Clara"] + [[highway.maz_to_maz.demand_county_groups]] + number = 2 + counties = ["Alameda", "Contra Costa"] + [[highway.maz_to_maz.demand_county_groups]] + number = 3 + counties = ["Solano", "Napa", "Sonoma", "Marin"] + + [[highway.classes]] + name = "da" + description = "drive alone" + mode_code = "d" + excluded_links = [ "is_toll_da", "is_sr",] + value_of_time = 18.93 + operating_cost_per_mile = 17.23 + toll = ["@bridgetoll_da"] + skims = [ "time", "dist", "freeflowtime", "bridgetoll_da",] + [[highway.classes.demand]] + source = "household" + name = "SOV_GP_{period}" + [[highway.classes.demand]] + source = "air_passenger" + name = "da" + [[highway.classes.demand]] + source = "internal_external" + name = "da" + [[highway.classes]] + name = "sr2" + description = "shared ride 2" + mode_code = "e" + excluded_links = [ "is_toll_sr2", "is_sr3",] + value_of_time = 18.93 + operating_cost_per_mile = 17.23 + toll = [ "@bridgetoll_sr2" ] + toll_factor = 0.5714285714285714 + skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr2", "hovdist",] + [[highway.classes.demand]] + source = "household" + name = "SR2_GP_{period}" + factor = 0.5714285714285714 + [[highway.classes.demand]] + source = "household" + name = "SR2_HOV_{period}" + factor = 0.5714285714285714 + [[highway.classes.demand]] + source = "air_passenger" + name = "sr2" + [[highway.classes.demand]] + source = "internal_external" + name = "sr2" + [[highway.classes]] + name = "sr3" + description = "shared ride 3+" + mode_code = "f" + excluded_links = [ "is_toll_sr3",] + value_of_time = 18.93 + operating_cost_per_mile = 17.23 + toll = ["@bridgetoll_sr3"] + toll_factor = 0.4 + skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr3", "hovdist",] + [[highway.classes.demand]] + source = "household" + name = "SR3_GP_{period}" + factor = 0.4 + [[highway.classes.demand]] + source = "household" + name = "SR3_HOV_{period}" + factor = 0.4 + [[highway.classes.demand]] + source = "air_passenger" + name = "sr3" + 
[[highway.classes.demand]] + source = "internal_external" + name = "sr3" + [[highway.classes]] + name = "trk" + description = "truck" + mode_code = "t" + excluded_links = [ "is_toll_vsm", "is_toll_sml", "is_toll_med", "is_sr",] + value_of_time = 37.87 + operating_cost_per_mile = 31.28 + toll = ["@bridgetoll_sml"] + skims = [ "time", "dist", "freeflowtime", "bridgetoll_vsm", "bridgetoll_sml", "bridgetoll_med",] + [[highway.classes.demand]] + source = "truck" + name = "vsmtrk" + [[highway.classes.demand]] + source = "truck" + name = "smltrk" + [[highway.classes.demand]] + source = "truck" + name = "medtrk" + [[highway.classes]] + name = "lrgtrk" + description = "large truck" + mode_code = "l" + excluded_links = [ "is_toll_lrg", "is_auto_only",] + value_of_time = 37.87 + operating_cost_per_mile = 31.28 + toll = ["@bridgetoll_lrg"] + pce = 2.0 + skims = [ "time", "dist", "freeflowtime", "bridgetoll_lrg",] + [[highway.classes.demand]] + source = "truck" + name = "lrgtrk" + factor = 2.0 + [[highway.classes]] + name = "datoll" + description = "drive alone toll" + mode_code = "D" + excluded_links = [ "is_sr",] + value_of_time = 18.93 + operating_cost_per_mile = 17.23 + toll = [ "@valuetoll_da", "@bridgetoll_da" ] + skims = [ "time", "dist", "freeflowtime", "bridgetoll_da", "valuetoll_da", "tolldist",] + [[highway.classes.demand]] + source = "household" + name = "SOV_PAY_{period}" + [[highway.classes.demand]] + source = "internal_external" + name = "datoll" + [[highway.classes]] + name = "sr2toll" + description = "shared ride 2 toll" + mode_code = "E" + excluded_links = [ "is_sr3",] + value_of_time = 18.93 + operating_cost_per_mile = 17.23 + toll = [ "@valuetoll_sr2", "@bridgetoll_sr2" ] + toll_factor = 0.5714285714285714 + skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr2", "valuetoll_sr2", "hovdist", "tolldist",] + [[highway.classes.demand]] + source = "household" + name = "SR2_PAY_{period}" + factor = 0.5714285714285714 + [[highway.classes.demand]] + source = "internal_external" + name = "sr2toll" + [[highway.classes]] + name = "sr3toll" + description = "shared ride 3+ toll" + mode_code = "F" + excluded_links = [] + value_of_time = 18.93 + operating_cost_per_mile = 17.23 + toll = [ "@valuetoll_sr3", "@bridgetoll_sr3" ] + toll_factor = 0.4 + skims = [ "time", "dist", "freeflowtime", "bridgetoll_sr3", "valuetoll_sr3", "hovdist", "tolldist",] + [[highway.classes.demand]] + source = "household" + name = "SR3_PAY_{period}" + factor = 0.4 + [[highway.classes.demand]] + source = "internal_external" + name = "sr3toll" + [[highway.classes]] + name = "trktoll" + description = "truck toll" + mode_code = "T" + excluded_links = [ "is_sr",] + value_of_time = 37.87 + operating_cost_per_mile = 31.28 + toll = [ "@valuetoll_sml", "@bridgetoll_sml" ] + skims = [ "time", "dist", "freeflowtime", "bridgetoll_vsm", "bridgetoll_sml", "bridgetoll_med", "valuetoll_vsm", "valuetoll_sml", "valuetoll_med",] + [[highway.classes.demand]] + source = "truck" + name = "vsmtrktoll" + [[highway.classes.demand]] + source = "truck" + name = "smltrktoll" + [[highway.classes.demand]] + source = "truck" + name = "medtrktoll" + [[highway.classes]] + name = "lrgtrktoll" + description = "large truck toll" + mode_code = "L" + excluded_links = [ "is_auto_only", "is_sr",] + value_of_time = 37.87 + operating_cost_per_mile = 31.28 + pce = 2.0 + toll = [ "@valuetoll_lrg", "@bridgetoll_lrg" ] + skims = [ "time", "dist", "freeflowtime", "bridgetoll_lrg", "valuetoll_lrg",] + [[highway.classes.demand]] + source = "truck" + name = 
"lrgtrktoll" + factor = 2.0 + + +[transit] + apply_msa_demand = false + value_of_time = 16.2 + effective_headway_source = "hdw" + initial_wait_perception_factor = 1.5 + transfer_wait_perception_factor = 3.0 + walk_perception_factor = 2.0 + initial_boarding_penalty = 10 + transfer_boarding_penalty = 10 + max_transfers = 3 + output_skim_path = "skim_matrices\\transit\\transit_skims_{period}.omx" + fares_path = "inputs\\trn\\fares.far" + fare_matrix_path = "inputs\\trn\\fareMatrix.txt" + # max expected transfer distance for mode-to-mode transfer fare table generation + fare_max_transfer_distance_miles = 3.0 + use_fares = false + # for TAZ instead of TAPs + override_connector_times = false + #input_connector_access_times_path = "inputs\\trn\\estimated_taz_access_connectors.csv" + #input_connector_egress_times_path = "inputs\\trn\\estimated_taz_egress_connectors.csv" + #output_stop_usage_path = "inputs\\trn\\stop_usage_{period}.csv" + +[emme] + num_processors = "max-1" + all_day_scenario_id = 1 + project_path = "emme_project\\mtc_emme.emp" + highway_database_path = "emme_project\\Database_highway\\emmebank" + active_database_paths = [ "emme_project\\Database_maz\\emmebank", ] + transit_database_path = "emme_project\\Database_transit\\emmebank" + + +[[highway.capclass_lookup]] + capclass = 0 + capacity = 0 + free_flow_speed = 0 + critical_speed = 0 + +[[highway.capclass_lookup]] + capclass = 1 + capacity = 2050 + free_flow_speed = 55 + critical_speed = 18.835 + +[[highway.capclass_lookup]] + capclass = 2 + capacity = 1450 + free_flow_speed = 40 + critical_speed = 25.898 + +[[highway.capclass_lookup]] + capclass = 3 + capacity = 1450 + free_flow_speed = 30 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 4 + capacity = 900 + free_flow_speed = 20 + critical_speed = 4.709 + +[[highway.capclass_lookup]] + capclass = 5 + capacity = 900 + free_flow_speed = 20 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 6 + capacity = 600 + free_flow_speed = 15 + critical_speed = 47.087 + +[[highway.capclass_lookup]] + capclass = 7 + capacity = 600 + free_flow_speed = 15 + critical_speed = 7.063 + +[[highway.capclass_lookup]] + capclass = 8 + capacity = 2050 + free_flow_speed = 18 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 9 + capacity = 0 + free_flow_speed = 0 + critical_speed = 25.898 + +[[highway.capclass_lookup]] + capclass = 10 + capacity = 0 + free_flow_speed = 18 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 11 + capacity = 2050 + free_flow_speed = 55 + critical_speed = 7.063 + +[[highway.capclass_lookup]] + capclass = 12 + capacity = 1450 + free_flow_speed = 40 + critical_speed = 4.709 + +[[highway.capclass_lookup]] + capclass = 13 + capacity = 1500 + free_flow_speed = 30 + critical_speed = 4.709 + +[[highway.capclass_lookup]] + capclass = 14 + capacity = 950 + free_flow_speed = 25 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 15 + capacity = 950 + free_flow_speed = 25 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 16 + capacity = 650 + free_flow_speed = 20 + critical_speed = 18.835 + +[[highway.capclass_lookup]] + capclass = 17 + capacity = 650 + free_flow_speed = 20 + critical_speed = 25.898 + +[[highway.capclass_lookup]] + capclass = 18 + capacity = 2050 + free_flow_speed = 18 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 19 + capacity = 0 + free_flow_speed = 0 + critical_speed = 4.709 + +[[highway.capclass_lookup]] + capclass = 20 + capacity = 0 + 
free_flow_speed = 18 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 21 + capacity = 2100 + free_flow_speed = 60 + critical_speed = 47.087 + +[[highway.capclass_lookup]] + capclass = 22 + capacity = 1600 + free_flow_speed = 45 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 23 + capacity = 1550 + free_flow_speed = 35 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 24 + capacity = 1000 + free_flow_speed = 30 + critical_speed = 28.252 + +[[highway.capclass_lookup]] + capclass = 25 + capacity = 1000 + free_flow_speed = 30 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 26 + capacity = 700 + free_flow_speed = 25 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 27 + capacity = 700 + free_flow_speed = 25 + critical_speed = 4.709 + +[[highway.capclass_lookup]] + capclass = 28 + capacity = 700 + free_flow_speed = 18 + critical_speed = 4.709 + +[[highway.capclass_lookup]] + capclass = 29 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 30 + capacity = 0 + free_flow_speed = 18 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 31 + capacity = 2100 + free_flow_speed = 60 + critical_speed = 21.189 + +[[highway.capclass_lookup]] + capclass = 32 + capacity = 1600 + free_flow_speed = 45 + critical_speed = 28.252 + +[[highway.capclass_lookup]] + capclass = 33 + capacity = 1550 + free_flow_speed = 35 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 34 + capacity = 1000 + free_flow_speed = 35 + critical_speed = 7.063 + +[[highway.capclass_lookup]] + capclass = 35 + capacity = 1000 + free_flow_speed = 35 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 36 + capacity = 700 + free_flow_speed = 30 + critical_speed = 47.087 + +[[highway.capclass_lookup]] + capclass = 37 + capacity = 700 + free_flow_speed = 30 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 38 + capacity = 2100 + free_flow_speed = 18 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 39 + capacity = 0 + free_flow_speed = 0 + critical_speed = 30.607 + +[[highway.capclass_lookup]] + capclass = 40 + capacity = 0 + free_flow_speed = 18 + critical_speed = 18.835 + +[[highway.capclass_lookup]] + capclass = 41 + capacity = 2150 + free_flow_speed = 65 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 42 + capacity = 1650 + free_flow_speed = 50 + critical_speed = 7.063 + +[[highway.capclass_lookup]] + capclass = 43 + capacity = 1550 + free_flow_speed = 40 + critical_speed = 7.063 + +[[highway.capclass_lookup]] + capclass = 44 + capacity = 1050 + free_flow_speed = 35 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 45 + capacity = 1050 + free_flow_speed = 35 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 46 + capacity = 900 + free_flow_speed = 30 + critical_speed = 21.189 + +[[highway.capclass_lookup]] + capclass = 47 + capacity = 900 + free_flow_speed = 30 + critical_speed = 28.252 + +[[highway.capclass_lookup]] + capclass = 48 + capacity = 2150 + free_flow_speed = 18 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 49 + capacity = 0 + free_flow_speed = 0 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 50 + capacity = 0 + free_flow_speed = 18 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 51 + capacity = 2150 + free_flow_speed = 65 + critical_speed = 47.087 + 
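For reference, the [[highway.capclass_lookup]] entries above and below form a plain lookup table keyed by capclass. A minimal sketch of how such an array-of-tables can be collapsed into a dictionary once the TOML is loaded; the file path and the direct use of the toml package are illustrative assumptions only (tm2py itself loads config through tm2py.config.Configuration, as tests/test_config.py later in this diff shows).

import toml  # the toml package is added to requirements.txt in this diff

# Illustrative only: collapse the [[highway.capclass_lookup]] array-of-tables
# into a dict keyed by capclass, e.g. lookup[1]["capacity"] -> 2050
config = toml.load("examples/model_config.toml")  # path assumed from tests/test_config.py
capclass_lookup = {row["capclass"]: row for row in config["highway"]["capclass_lookup"]}
assert capclass_lookup[1]["free_flow_speed"] == 55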
+[[highway.capclass_lookup]] + capclass = 52 + capacity = 1650 + free_flow_speed = 55 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 53 + capacity = 1550 + free_flow_speed = 40 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 54 + capacity = 1050 + free_flow_speed = 40 + critical_speed = 23.543 + +[[highway.capclass_lookup]] + capclass = 55 + capacity = 1050 + free_flow_speed = 40 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 56 + capacity = 950 + free_flow_speed = 35 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 57 + capacity = 950 + free_flow_speed = 35 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 58 + capacity = 2150 + free_flow_speed = 18 + critical_speed = 9.417 + +[[highway.capclass_lookup]] + capclass = 59 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 60 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 61 + capacity = 0 + free_flow_speed = 0 + critical_speed = 23.543 + +[[highway.capclass_lookup]] + capclass = 62 + capacity = 0 + free_flow_speed = 0 + critical_speed = 30.607 + +[[highway.capclass_lookup]] + capclass = 63 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 64 + capacity = 0 + free_flow_speed = 0 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 65 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 66 + capacity = 0 + free_flow_speed = 0 + critical_speed = 47.087 + +[[highway.capclass_lookup]] + capclass = 67 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 68 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 69 + capacity = 0 + free_flow_speed = 0 + critical_speed = 21.189 + +[[highway.capclass_lookup]] + capclass = 70 + capacity = 0 + free_flow_speed = 0 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 71 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 72 + capacity = 0 + free_flow_speed = 0 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 73 + capacity = 0 + free_flow_speed = 0 + critical_speed = 11.772 + +[[highway.capclass_lookup]] + capclass = 74 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 75 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 76 + capacity = 0 + free_flow_speed = 0 + critical_speed = 23.543 + +[[highway.capclass_lookup]] + capclass = 77 + capacity = 0 + free_flow_speed = 0 + critical_speed = 30.607 + +[[highway.capclass_lookup]] + capclass = 78 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 79 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 80 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 81 + capacity = 0 + free_flow_speed = 0 + critical_speed = 47.087 + +[[highway.capclass_lookup]] + capclass = 82 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 83 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] 
+ capclass = 84 + capacity = 0 + free_flow_speed = 0 + critical_speed = 23.543 + +[[highway.capclass_lookup]] + capclass = 85 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 86 + capacity = 0 + free_flow_speed = 0 + critical_speed = 16.48 + +[[highway.capclass_lookup]] + capclass = 87 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 88 + capacity = 0 + free_flow_speed = 0 + critical_speed = 14.126 + +[[highway.capclass_lookup]] + capclass = 89 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 90 + capacity = 0 + free_flow_speed = 0 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 99 + capacity = 0 + free_flow_speed = 10 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 109 + capacity = 0 + free_flow_speed = 10 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 119 + capacity = 0 + free_flow_speed = 10 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 129 + capacity = 0 + free_flow_speed = 10 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 139 + capacity = 0 + free_flow_speed = 10 + critical_speed = 3.0 + +[[highway.capclass_lookup]] + capclass = 149 + capacity = 0 + free_flow_speed = 10 + critical_speed = 3.0 + +[[transit.modes]] + mode_id = "w" + name = "walk" + type = "WALK" + assign_type = "AUX_TRANSIT" + speed_miles_per_hour = 3.0 +[[transit.modes]] + mode_id = "a" + name = "access" + type = "ACCESS" + assign_type = "AUX_TRANSIT" + speed_miles_per_hour = 3.0 +[[transit.modes]] + mode_id = "e" + name = "egress" + type = "EGRESS" + assign_type = "AUX_TRANSIT" + speed_miles_per_hour = 3.0 +[[transit.modes]] + mode_id = "b" + name = "local_bus" + type = "LOCAL" + assign_type = "TRANSIT" + in_vehicle_perception_factor = 1.0 +[[transit.modes]] + mode_id = "x" + name = "exp_bus" + type = "PREMIUM" + assign_type = "TRANSIT" + in_vehicle_perception_factor = 1.0 +[[transit.modes]] + mode_id = "f" + name = "ferry" + type = "PREMIUM" + assign_type = "TRANSIT" + in_vehicle_perception_factor = 1.0 +[[transit.modes]] + mode_id = "l" + name = "light_rail" + type = "PREMIUM" + assign_type = "TRANSIT" + in_vehicle_perception_factor = 1.0 +[[transit.modes]] + mode_id = "h" + name = "heavy_rail" + type = "PREMIUM" + assign_type = "TRANSIT" + in_vehicle_perception_factor = 1.0 +[[transit.modes]] + mode_id = "r" + name = "comm_rail" + type = "PREMIUM" + assign_type = "TRANSIT" + in_vehicle_perception_factor = 1.0 + +[[transit.vehicles]] + vehicle_id = 12 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 14 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 13 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 16 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 17 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 20 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 21 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id 
= 24 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 28 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 30 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 38 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 42 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 44 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 46 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 49 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 52 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 56 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 60 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 63 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 66 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 68 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 70 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 71 + mode = "b" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 80 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 81 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 84 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 86 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 87 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 90 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 91 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 92 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 94 + mode = "x" + name = "" + auto_equivalent = 2.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 101 + mode = "f" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 103 + mode = "f" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + 
vehicle_id = 110 + mode = "l" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 111 + mode = "l" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 120 + mode = "h" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 130 + mode = "r" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 131 + mode = "r" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 +[[transit.vehicles]] + vehicle_id = 133 + mode = "r" + name = "" + auto_equivalent = 0.0 + seated_capacity = 1 + total_capacity = 2 \ No newline at end of file diff --git a/examples/scenario_config.toml b/examples/scenario_config.toml new file mode 100644 index 00000000..605d64fd --- /dev/null +++ b/examples/scenario_config.toml @@ -0,0 +1,35 @@ +#################################### +# SCENARIO CONFIGURATION # +#################################### + +[scenario] + year = 2015 + verify = false + maz_landuse_file = "inputs\\landuse\\maz_data.csv" + +[run] + start_component = "" + initial_components = [ + #"create_tod_scenarios", + #"active_modes", + #"air_passenger", + "prepare_network_highway", + "highway", + "highway_maz_skim", + #"prepare_network_transit", + #"transit_assign", + #"transit_skim" + ] + global_iteration_components = [ + #"household", + #"internal_external", + #"truck", + #"highway_maz_assign", + #"highway", + #"prepare_network_transit", + #"transit_assign", + #"transit_skim" + ] + final_components = [] + start_iteration = 0 + end_iteration = 1 diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..0e54f2ab --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,86 @@ +site_name: "Travel Model Two Python Package: tm2py" +site_url: https://bayareametro.github.io/tm2py +repo_url: https://github.com/bayareametro/tm2py +edit_uri: edit/main/docs + +theme: + name: material + features: + - navigation.tabs + palette: + - media: "(prefers-color-scheme: light)" + scheme: default + primary: blue + accent: grey + toggle: + icon: material/toggle-switch-off-outline + name: Switch to dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: blue + accent: grey + toggle: + icon: material/toggle-switch + name: Switch to light mode + +plugins: + - autorefs + - awesome-pages + - mkdocstrings: + default_handler: python + enable_inventory: true + handlers: + python: + setup_commands: + - import sys + - from unittest.mock import MagicMock as mock + - sys.path.append('../') + - sys.modules["inro"] = mock() + - sys.modules["inro.emme"] = mock() + - sys.modules["inro.emme.database"] = mock() + - sys.modules["inro.emme.database.emmebank"] = mock() + - sys.modules["inro.emme.database.scenario"] = mock() + - sys.modules["inro.emme.database.matrix"] = mock() + - sys.modules["inro.emme.network"] = mock() + - sys.modules["inro.emme.network.node"] = mock() + - sys.modules["inro.emme.desktop"] = mock() + - sys.modules["inro.emme.desktop.app"] = mock() + - sys.modules["inro.modeller"] = mock() + selection: + new_path_syntax: true + rendering: + show_root_heading: true + show_source: true + heading_level: 3 + custom_templates: templates + watch: + - tm2py + - search + +extra_javascript: + - https://unpkg.com/mermaid@8.5.0/dist/mermaid.min.js + +extra_css: + - https://use.fontawesome.com/releases/v5.13.0/css/all.css + +markdown_extensions: + - admonition + 
- codehilite: + linenums: true + - fontawesome_markdown + - meta + - pymdownx.inlinehilite + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.tabbed + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_div_format + - pymdownx.smartsymbols + - smarty + - toc: + # insert a blank space before the character + permalink: " ¶" + \ No newline at end of file diff --git a/pre-commit-config.yaml b/pre-commit-config.yaml index ed28d5b0..eaaf0ec8 100644 --- a/pre-commit-config.yaml +++ b/pre-commit-config.yaml @@ -5,13 +5,20 @@ repos: - id: black language_version: python3.7 - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.4.0 + rev: v3.4.0 hooks: - id: flake8 types: - python - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] - id: end-of-file-fixer - id: check-yaml - id: check-added-large-files - id: check-json + - id: requirements-txt-fixer + - id: mixed-line-ending + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.27.1 + hooks: + - id: markdownlint \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 9d25121a..f755789b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,14 +3,27 @@ geographiclib geopandas > 0.8.0 fiona folium +fontawesome_markdown +jinja2 < 3.1.0 jsonschema jupyter lark-parser +mkdocs +mkdocs-autorefs +mkdocs-awesome-pages-plugin +mkdocs-macros-plugin +mkdocs-material +mkdocstrings +openmatrix osmnx >= 0.12 notebook numpy pandas > 1.0 +pydantic pyproj > 2.2.0 pyyaml +pywin32==224 ; sys_platform == 'win32' rtree shapely +tables +toml diff --git a/setup.py b/setup.py index be37de81..61116b0c 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,4 @@ +import os from setuptools import setup version = "0.0.1" @@ -24,10 +25,10 @@ dev_requirements = f.readlines() install_requires_dev = [r.strip() for r in dev_requirements] -# While version is in active development, install both development and base requirements. -major_version_number = int(version.split('.')[0]) +# While version is in active development, install both development and base requirements. 
+major_version_number = int(version.split(".")[0]) if major_version_number < 1: - install_requires = install_requires + install_requires_dev + install_requires = install_requires + install_requires_dev setup( name="tm2py", @@ -41,4 +42,5 @@ packages=["tm2py"], include_package_data=True, install_requires=install_requires, + scripts=["bin/tm2py", "bin/get_test_data"], ) diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..3be9d088 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,46 @@ +import os +import sys +from unittest.mock import MagicMock + +import pytest + +EXAMPLE_DIR = os.path.join( + os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "examples" +) +TEST_CONFIG = os.path.join(EXAMPLE_DIR, "scenario_config.toml") +MODEL_CONFIG = os.path.join(EXAMPLE_DIR, "model_config.toml") + + +def test_config_read(): + """Configuration should load parameters to the correct namespaces.""" + # If (and only if) Emme is not installed, replace inro libraries with MagicMock + try: + import inro.emme.database.emmebank + except ModuleNotFoundError: + sys.modules["inro.emme.database.emmebank"] = MagicMock() + sys.modules["inro.emme.network"] = MagicMock() + sys.modules["inro.emme.database.scenario"] = MagicMock() + sys.modules["inro.emme.database.matrix"] = MagicMock() + sys.modules["inro.emme.network.node"] = MagicMock() + sys.modules["inro.emme.desktop.app"] = MagicMock() + sys.modules["inro"] = MagicMock() + sys.modules["inro.modeller"] = MagicMock() + from tm2py.config import Configuration + + my_config = Configuration.load_toml([TEST_CONFIG, MODEL_CONFIG]) + + assert my_config.run.start_iteration == 0 + assert my_config.run.end_iteration == 1 + assert my_config.scenario.year == 2015 + assert my_config.time_periods[1].name == "am" + assert my_config.highway.maz_to_maz.operating_cost_per_mile == 18.93 + assert len(my_config.time_periods) == 5 + assert my_config.highway.classes[0].description == "drive alone" + + +@pytest.mark.xfail +def test_config_read_badfile(): + """Should have good behavior when file isn't there.""" + from tm2py.config import Configuration + + Configuration.load_toml("this_is_not_a_valid_file.toml") diff --git a/tests/test_docs.py b/tests/test_docs.py new file mode 100644 index 00000000..e36eb0ad --- /dev/null +++ b/tests/test_docs.py @@ -0,0 +1,23 @@ +import pytest + + +def test_docs_build(): + """ + Test that the documentation build is successful. + """ + import subprocess + import os + + # Get the path to the base directory + base_dir = os.path.join(os.path.dirname(__file__), "..") + assert os.path.exists("mkdocs.yml") + + # Build the docs + try: + subprocess.run(["mkdocs", "build"], check=True, cwd=base_dir,capture_output=True) + except subprocess.CalledProcessError as e: + msg = e.stderr.decode('utf-8') + pytest.fail(f"Documentation Failed to Build.\n {msg}") + + # Check that the docs were built successfully + assert os.path.exists(os.path.join(base_dir, "site", "index.html")) \ No newline at end of file diff --git a/tests/test_tests.py b/tests/test_tests.py index 56e31af2..2018140c 100644 --- a/tests/test_tests.py +++ b/tests/test_tests.py @@ -1,14 +1,12 @@ import pytest + @pytest.mark.skipci def test_skipci(): - """Shouldn't be run on CI server. - """ + """Shouldn't be run on CI server.""" print("If this is a CI server, the marker isn't working!!!") def test_testing(): - """Tests that tests are run. 
- """ + """Tests that tests are run.""" print("Tests are being run!") - import tm2py diff --git a/tests/test_tools.py b/tests/test_tools.py new file mode 100644 index 00000000..f4c8ec8d --- /dev/null +++ b/tests/test_tools.py @@ -0,0 +1,48 @@ +from unittest.mock import MagicMock +import sys +import os +import pytest + +_EXAMPLE_URL = ( + r"https://mtcdrive.box.com/shared/static/3entr016e9teq2wt46x1os3fjqylfoge.zip" +) + + +def test_download_unzip(): + # If (and only if) Emme is not installed, replace INRO libraries with MagicMock + try: + import inro.emme.database.emmebank + except ModuleNotFoundError: + sys.modules["inro.emme.database.emmebank"] = MagicMock() + sys.modules["inro.emme.network"] = MagicMock() + sys.modules["inro.emme.database.scenario"] = MagicMock() + sys.modules["inro.emme.database.matrix"] = MagicMock() + sys.modules["inro.emme.network.node"] = MagicMock() + sys.modules["inro.emme.desktop.app"] = MagicMock() + sys.modules["inro"] = MagicMock() + sys.modules["inro.modeller"] = MagicMock() + + from tm2py.tools import _download, _unzip + + import tempfile + + with tempfile.TemporaryDirectory() as temp_dir: + temp_file = os.path.join(temp_dir, "test_download.zip") + unzip_directory = os.path.join(temp_dir, "test_download") + _download(_EXAMPLE_URL, temp_file) + assert os.path.getsize(temp_file) > 0, "download failed" + + _unzip(temp_file, unzip_directory) + assert os.path.exists(unzip_directory), "unzip failed, no directory" + assert os.path.getsize(unzip_directory) > 0, "unzip failed, empty directory" + files_to_check = [ + os.path.join("inputs", "hwy", "tolls.csv"), + os.path.join("inputs", "nonres", "2035_fromOAK.csv"), + os.path.join("inputs", "landuse", "maz_data.csv"), + os.path.join("emme_project", "mtc_emme.emp"), + os.path.join("emme_project", "Database_highway", "emmebank"), + ] + for file_name in files_to_check: + assert os.path.exists( + os.path.join(unzip_directory, file_name) + ), f"unzip failed, missing {file_name}" diff --git a/tests/test_union_city.py b/tests/test_union_city.py new file mode 100644 index 00000000..5982b59d --- /dev/null +++ b/tests/test_union_city.py @@ -0,0 +1,107 @@ +import os +from unittest.mock import MagicMock +import sys +import pytest + + +_EXAMPLES_DIR = r"examples" + + +def test_example_download(): + # If (and only if) Emme is not installed, replace INRO libraries with MagicMock + try: + import inro.emme.database.emmebank + except ModuleNotFoundError: + sys.modules["inro.emme.database.emmebank"] = MagicMock() + sys.modules["inro.emme.network"] = MagicMock() + sys.modules["inro.emme.database.scenario"] = MagicMock() + sys.modules["inro.emme.database.matrix"] = MagicMock() + sys.modules["inro.emme.network.node"] = MagicMock() + sys.modules["inro.emme.desktop.app"] = MagicMock() + sys.modules["inro"] = MagicMock() + sys.modules["inro.modeller"] = MagicMock() + + import shutil + from tm2py.examples import get_example + + name = "UnionCity" + example_dir = os.path.join(os.getcwd(), _EXAMPLES_DIR) + union_city_root = os.path.join(example_dir, name) + if os.path.exists(union_city_root): + shutil.rmtree(union_city_root) + + get_example( + example_name="UnionCity", example_subdir=_EXAMPLES_DIR, root_dir=os.getcwd() + ) + # default retrieval_url points to Union City example on box + + # check that the root union city folder exists + assert os.path.isdir(os.path.join(example_dir, name)) + # check some expected files exists + files_to_check = [ + os.path.join("inputs", "hwy", "tolls.csv"), + os.path.join("inputs", "nonres", "2035_fromOAK.csv"), 
+ os.path.join("inputs", "landuse", "maz_data.csv"), + os.path.join("emme_project", "mtc_emme.emp"), + os.path.join("emme_project", "Database_highway", "emmebank"), + ] + for file_name in files_to_check: + assert os.path.exists( + os.path.join(example_dir, name, file_name) + ), f"get_example failed, missing {file_name}" + # check zip file was removed + assert not (os.path.exists(os.path.join(example_dir, name, "test_data.zip"))) + + +@pytest.mark.skipci +def test_highway(): + from tm2py.controller import RunController + from tm2py.examples import get_example + import openmatrix as _omx + + union_city_root = os.path.join(os.getcwd(), _EXAMPLES_DIR, "UnionCity") + get_example( + example_name="UnionCity", example_subdir=_EXAMPLES_DIR, root_dir=os.getcwd() + ) + controller = RunController( + [ + os.path.join(_EXAMPLES_DIR, r"scenario_config.toml"), + os.path.join(_EXAMPLES_DIR, r"model_config.toml"), + ], + run_dir=union_city_root + ) + controller.run() + + root = os.path.join(controller.run_dir, r"skim_matrices\highway") + ref_root = os.path.join(controller.run_dir, r"ref_skim_matrices\highway") + open_files = [] + file_names = [name for name in os.listdir(root) if name.endswith(".omx")] + different_skims = [] + try: + for name in file_names: + skims = _omx.open_file(os.path.join(root, name)) + open_files.append(skims) + ref_skims = _omx.open_file(os.path.join(ref_root, name)) + open_files.append(ref_skims) + for key in skims.list_matrices(): + data = skims[key].read() + ref_data = ref_skims[key].read() + if not (data == ref_data).all(): + different_skims.append(key) + finally: + for f in open_files: + f.close() + assert ( + len(different_skims) == 0 + ), f"there are {len(different_skims)} different skims: {','.join(different_skims)}" + + count_different_lines = 0 + with open(os.path.join(root, "HWYSKIM_MAZMAZ_DA.csv")) as data: + with open(os.path.join(ref_root, "HWYSKIM_MAZMAZ_DA.csv")) as ref_data: + for line in data: + ref_line = next(ref_data) + if ref_line != line: + count_different_lines += 1 + assert ( + count_different_lines == 0 + ), f"HWYSKIM_MAZMAZ_DA.csv differs on {count_different_lines} lines" diff --git a/tm2py/__init__.py b/tm2py/__init__.py index fa3b4604..ba9a9f87 100644 --- a/tm2py/__init__.py +++ b/tm2py/__init__.py @@ -1,2 +1,31 @@ -from ._api import * +"""Base of tm2py module""" from ._version import __version__ + +from .config import ( + Configuration, + HouseholdConfig, + RunConfig, + ScenarioConfig, + TimePeriodConfig, +) +from .logger import Logger, LogStartEnd +from .controller import RunController +from .components.component import Component +from .examples import get_example + +__all__ = [ + # component + "Component", + # config + "Configuration", + "get_example", + "HouseholdConfig", + "RunConfig", + "ScenarioConfig", + "TimePeriodConfig", + # controller + "RunController", + # logger + "Logger", + "LogStartEnd", +] diff --git a/tm2py/_api.py b/tm2py/_api.py deleted file mode 100644 index bc5e6a52..00000000 --- a/tm2py/_api.py +++ /dev/null @@ -1,3 +0,0 @@ -"Exposes api in direct package namespace" - -from .template import TemplateClass \ No newline at end of file diff --git a/tm2py/_version.py b/tm2py/_version.py index b3c06d48..b408052a 100644 --- a/tm2py/_version.py +++ b/tm2py/_version.py @@ -1 +1,3 @@ -__version__ = "0.0.1" \ No newline at end of file +"""Version info""" + +__version__ = "0.0.1" diff --git a/tm2py/components/__init__.py b/tm2py/components/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/component.py 
b/tm2py/components/component.py new file mode 100644 index 00000000..d4543e87 --- /dev/null +++ b/tm2py/components/component.py @@ -0,0 +1,112 @@ +"""Root component ABC +""" +from __future__ import annotations +import os +from abc import ABC, abstractmethod + +from typing import TYPE_CHECKING, List + +from tm2py.emme.manager import EmmeScenario + +if TYPE_CHECKING: + from tm2py.controller import RunController + + +class Component(ABC): + """Base component class for tm2py top-level inheritance. + + Example: + :: + class MyComponent(Component): + + def __init__(self, controller): + super().__init__(controller) + self._parameter = None + + def run(self): + self._step1() + self._step2() + + def _step1(self): + pass + + def _step2(self): + pass + """ + + def __init__(self, controller: RunController): + self._controller = controller + self._trace = None + + @property + def controller(self): + """Parent controller""" + return self._controller + + def get_abs_path(self, rel_path: str): + """Get the absolute path from the root run directory given a relative path.""" + return os.path.join(self.controller.run_dir, rel_path) + + def get_emme_scenario(self, emmebank_path: str, time_period: str) -> EmmeScenario: + """Get the Emme scenario object from the Emmebank at emmebank_path for the time_period ID. + + Args: + emmebank_path: valid Emmebank path, absolute or relative to root run directory + time_period: valid time_period ID + + Returns + Emme Scenario object (see Emme API Reference) + """ + if not os.path.isabs(emmebank_path): + emmebank_path = self.get_abs_path(emmebank_path) + emmebank = self.controller.emme_manager.emmebank(emmebank_path) + scenario_id = {tp.name: tp.emme_scenario_id for tp in self.config.time_periods}[ + time_period + ] + return emmebank.scenario(scenario_id) + + @property + def config(self): + """Configuration settings loaded from config files""" + return self.controller.config + + @property + def top_sheet(self): + """docstring placeholder for top sheet""" + return self.controller.top_sheet + + @property + def logger(self): + """docstring placeholder for logger""" + return self.controller.logger + + @property + def trace(self): + """docstring placeholder for trace""" + return self._trace + + def validate_inputs(self): + """Validate inputs are correct at model initiation, fail fast if not""" + + @abstractmethod + def run(self): + """Run model component""" + + def report_progress(self): + """Write progress to log file""" + + def test_component(self): + """Run stand-alone component test""" + + def write_top_sheet(self): + """Write key outputs to the model top sheet""" + + def verify(self): + """Verify component outputs / results""" + + def time_period_names(self) -> List[str]: + """Return input time_period name or names and return list of time_period names. 
+ + Returns: list of string names of time periods + """ + return [time.name for time in self.config.time_periods] diff --git a/tm2py/components/demand/__init__.py b/tm2py/components/demand/__init__.py new file mode 100644 index 00000000..13370d5b --- /dev/null +++ b/tm2py/components/demand/__init__.py @@ -0,0 +1 @@ +"""Demand components module""" diff --git a/tm2py/components/demand/air_passenger.py b/tm2py/components/demand/air_passenger.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/demand/commercial.py b/tm2py/components/demand/commercial.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/demand/demand.py b/tm2py/components/demand/demand.py new file mode 100644 index 00000000..24da7202 --- /dev/null +++ b/tm2py/components/demand/demand.py @@ -0,0 +1,144 @@ +"""Demand loading from OMX to Emme database""" + +from __future__ import annotations +from abc import ABC +from typing import Dict, Union, List, TYPE_CHECKING +import numpy as np + +from tm2py.components.component import Component +from tm2py.emme.matrix import OMXManager + +if TYPE_CHECKING: + from tm2py.controller import RunController + + +class PrepareDemand(Component, ABC): + """Abstract base class to import and average demand.""" + + def __init__(self, controller: RunController): + super().__init__(controller) + self._emmebank = None + + def _read(self, path, name, num_zones, factor=None): + with OMXManager(path, "r") as omx_file: + demand = omx_file.read(name) + if factor is not None: + demand = factor * demand + demand = self._redim_demand(demand, num_zones) + return demand + + @staticmethod + def _redim_demand(demand, num_zones): + _shape = demand.shape + if _shape != (num_zones, num_zones): + demand = np.pad( + demand, ((0, num_zones - _shape[0]), (0, num_zones - _shape[1])) + ) + return demand + + # Disable too many arguments recommendation + # pylint: disable=R0913 + def _save_demand(self, name, demand, scenario, description="", apply_msa=False): + matrix = self._emmebank.matrix(f'mf"{name}"') + msa_iteration = self.controller.iteration + if not apply_msa or msa_iteration <= 1: + if not matrix: + ident = self._emmebank.available_matrix_identifier("FULL") + matrix = self._emmebank.create_matrix(ident) + matrix.name = name + matrix.description = description + else: + if not matrix: + raise Exception(f"error averaging demand: matrix {name} does not exist") + prev_demand = matrix.get_numpy_data(scenario.id) + demand = prev_demand + (1.0 / msa_iteration) * (demand - prev_demand) + + matrix.set_numpy_data(demand, scenario.id) + + def _create_zero_matrix(self): + zero_matrix = self._emmebank.matrix('ms"zero"') + if zero_matrix is None: + ident = self._emmebank.available_matrix_identifier("SCALAR") + zero_matrix = self._emmebank.create_matrix(ident) + zero_matrix.name = "zero" + zero_matrix.description = "zero demand matrix" + zero_matrix.data = 0 + + +class PrepareHighwayDemand(PrepareDemand): + """Import and average highway demand. + + Demand is imported from OMX files based on reference file paths and OMX + matrix names in highway assignment config (highway.classes). + The demand is average using MSA with the current demand matrices + (in the Emmebank) if the controller.iteration > 1. 
+ + Args: + controller: parent RunController object + """ + + def __init__(self, controller: RunController): + super().__init__(controller) + self._emmebank_path = None + + # @LogStartEnd("prepare highway demand") + def run(self): + """Open combined demand OMX files from demand models and prepare for assignment.""" + self._emmebank_path = self.get_abs_path(self.config.emme.highway_database_path) + self._emmebank = self.controller.emme_manager.emmebank(self._emmebank_path) + self._create_zero_matrix() + for time in self.time_period_names(): + for klass in self.config.highway.classes: + self._prepare_demand(klass.name, klass.description, klass.demand, time) + + def _prepare_demand( + self, + name: str, + description: str, + demand_config: List[Dict[str, Union[str, float]]], + time_period: str, + ): + """Load demand from OMX files and save to Emme matrix for highway assignment. + + Average with previous demand (MSA) if the current iteration > 1 + + Args: + name (str): the name of the highway assignment class + description (str): the description for the highway assignment class + demand_config (dict): the list of file cross-reference(s) for the demand to be loaded + {"source": , + "name": , + "factor": } + time_period (str): the time time_period ID (name) + """ + scenario = self.get_emme_scenario(self._emmebank_path, time_period) + num_zones = len(scenario.zone_numbers) + demand = self._read_demand(demand_config[0], time_period, num_zones) + for file_config in demand_config[1:]: + demand = demand + self._read_demand(file_config, time_period, num_zones) + demand_name = f"{time_period}_{name}" + description = f"{time_period} {description} demand" + self._save_demand(demand_name, demand, scenario, description, apply_msa=True) + + def _read_demand(self, file_config, time_period, num_zones): + # Load demand from cross-referenced source file, + # the named demand model component under the key highway_demand_file + source = file_config["source"] + name = file_config["name"].format(period=time_period.upper()) + factor = file_config.get("factor") + path = self.get_abs_path(self.config[source].highway_demand_file) + return self._read(path.format(period=time_period), name, num_zones, factor) + + +# class PrepareTransitDemand(PrepareDemand): +# """Import transit demand.""" +# +# def run(self, time_period: Union[Collection[str], str] = None): +# """Open combined demand OMX files from demand models and prepare for assignment. 
+# +# Args: +# time_period: list of str names of time_periods, or name of a single time_period +# """ +# emmebank_path = self.get_abs_path(self.config.emme.transit_database_path) +# self._emmebank = self.controller.emme_manager.emmebank(emmebank_path) +# self._create_zero_matrix() diff --git a/tm2py/components/demand/household.py b/tm2py/components/demand/household.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/demand/internal_external.py b/tm2py/components/demand/internal_external.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/demand/visitor.py b/tm2py/components/demand/visitor.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/network/__init__.py b/tm2py/components/network/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/network/highway/__init__.py b/tm2py/components/network/highway/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tm2py/components/network/highway/highway_assign.py b/tm2py/components/network/highway/highway_assign.py new file mode 100644 index 00000000..b4cbeb8a --- /dev/null +++ b/tm2py/components/network/highway/highway_assign.py @@ -0,0 +1,450 @@ +"""Highway assignment and skim component. + +Performs equilibrium traffic assignment and generates resulting skims. + +The assignment is configured using the "highway" table in the source config. +See the config documentation for details. The traffic assignment runs according +to the list of assignment classes under highway.classes. + +Other relevant parameters from the config are + emme.num_processors: number of processors as integer or "MAX" or "MAX-N" + time_periods[].emme_scenario_id: Emme scenario number to use for each period + time_periods[].highway_capacity_factor + +The Emme network must have the following attributes available: + Link: + - "length" in feet + - "vdf", volume delay function (volume delay functions must also be set up) + - "@useclass", vehicle-class restrictions classification, auto-only, HOV only + - "@free_flow_time", the free flow time (in minutes) + - "@tollXX_YY", the toll for period XX and class subgroup (see truck + class) named YY, used together with @tollbooth to generate @bridgetoll_YY + and @valuetoll_YY + - "@maz_flow", the background traffic MAZ-to-MAZ SP assigned flow from highway_maz, + if controller.iteration > 0 + - modes: must be set on links and match the specified mode codes in + the traffic config + + Network results: + - @flow_XX: link PCE flows per class, where XX is the class name in the config + - timau: auto travel time + - volau: total assigned flow in PCE + + Notes: + - Output matrices are in miles, minutes, and cents (2010 dollars) and are stored + as real values; + - Intrazonal distance/time is one half the distance/time to the nearest neighbor; + - Intrazonal bridge and value tolls are assumed to be zero + +""" + +from __future__ import annotations +from contextlib import contextmanager as _context +import os +from typing import Dict, Union, List, TYPE_CHECKING + +import numpy as np + +from tm2py.components.component import Component +from tm2py.components.demand.demand import PrepareHighwayDemand +from tm2py.emme.manager import EmmeScenario +from tm2py.emme.matrix import MatrixCache, OMXManager +from tm2py.emme.network import NetworkCalculator +from tm2py.logger import LogStartEnd +from tm2py import tools + +if TYPE_CHECKING: + from tm2py.controller import RunController + + EmmeHighwayAnalysisSpec = Dict[ + str, + Union[
+ str, + bool, + None, + Dict[ + str, + Union[str, bool, None, Dict[str, Union[str, bool, None]]], + ], + ], + ] + EmmeHighwayClassSpec = Dict[ + str, + Union[ + str, + Dict[str, Union[str, float, Dict[str, str]]], + List[EmmeHighwayAnalysisSpec], + ], + ] + EmmeTrafficAssignmentSpec = Dict[ + str, + Union[str, Union[str, bool, None, float, List[EmmeHighwayClassSpec]]], + ] + + +class HighwayAssignment(Component): + """Highway assignment and skims. + Args: + controller: parent RunController object + """ + + def __init__(self, controller: RunController): + super().__init__(controller) + self._num_processors = tools.parse_num_processors( + self.config.emme.num_processors + ) + self._matrix_cache = None + self._skim_matrices = [] + + @LogStartEnd("Highway assignment and skims", level="STATUS") + def run(self): + """Run highway assignment""" + demand = PrepareHighwayDemand(self.controller) + demand.run() + for time in self.time_period_names(): + scenario = self.get_emme_scenario( + self.config.emme.highway_database_path, time + ) + with self._setup(scenario, time): + iteration = self.controller.iteration + assign_classes = [ + AssignmentClass(c, time, iteration) + for c in self.config.highway.classes + ] + if iteration > 0: + self._copy_maz_flow(scenario) + else: + self._reset_background_traffic(scenario) + self._create_skim_matrices(scenario, assign_classes) + assign_spec = self._get_assignment_spec(assign_classes) + # self.logger.log_dict(assign_spec, level="DEBUG") + with self.logger.log_start_end( + "Run SOLA assignment with path analyses", level="INFO" + ): + assign = self.controller.emme_manager.tool( + "inro.emme.traffic_assignment.sola_traffic_assignment" + ) + assign(assign_spec, scenario, chart_log_interval=1) + + # Subtract non-time costs from gen cost to get the raw travel time + for emme_class_spec in assign_spec["classes"]: + self._calc_time_skim(emme_class_spec) + # Set intra-zonal for time and dist to be 1/2 nearest neighbour + for class_config in self.config.highway.classes: + self._set_intrazonal_values( + time, + class_config["name"], + class_config["skims"], + ) + self._export_skims(scenario, time) + + @_context + def _setup(self, scenario: EmmeScenario, time_period: str): + """Setup and teardown for Emme Matrix cache and list of skim matrices + + Args: + scenario: Emme scenario object + time_period: time period name + """ + self._matrix_cache = MatrixCache(scenario) + self._skim_matrices = [] + msg = f"Highway assignment for period {time_period}" + with self.logger.log_start_end(msg, level="STATUS"): + try: + yield + finally: + self._matrix_cache.clear() + self._matrix_cache = None + self._skim_matrices = [] + + def _copy_maz_flow(self, scenario: EmmeScenario): + """Copy maz_flow from MAZ demand assignment to ul1 for background traffic. + + Args: + scenario: Emme scenario object""" + self.logger.log_time( + "Copy @maz_flow to ul1 for background traffic", indent=True, level="DETAIL" + ) + net_calc = NetworkCalculator(scenario) + net_calc("ul1", "@maz_flow") + + def _reset_background_traffic(self, scenario: EmmeScenario): + """Set ul1 for background traffic to 0 (no maz-maz flow) + + Args: + scenario: Emme scenario object""" + self.logger.log_time( + "Set ul1 to 0 for background traffic", indent=True, level="DETAIL" + ) + net_calc = NetworkCalculator(scenario) + net_calc("ul1", "0") + + def _create_skim_matrices( + self, scenario: EmmeScenario, assign_classes: List[AssignmentClass] + ): + """Create matrices to store skim results in Emme database. 
+ + Also add the matrices to list of self._skim_matrices. + + Args: + scenario: Emme scenario object + assign_classes: list of AssignmentClass objects + """ + create_matrix = self.controller.emme_manager.tool( + "inro.emme.data.matrix.create_matrix" + ) + + with self.logger.log_start_end("Creating skim matrices", level="DETAIL"): + for klass in assign_classes: + for matrix_name in klass.skim_matrices: + matrix = scenario.emmebank.matrix(f'mf"{matrix_name}"') + if not matrix: + matrix = create_matrix( + "mf", matrix_name, scenario=scenario, overwrite=True + ) + self.logger.log( + f"Create matrix name: {matrix_name}, id: {matrix.id}", + level="DEBUG", + ) + self._skim_matrices.append(matrix) + + def _get_assignment_spec( + self, assign_classes: List[AssignmentClass] + ) -> EmmeTrafficAssignmentSpec: + """Generate template Emme SOLA assignment specification + + Args: + assign_classes: list of AssignmentClass objects + + Returns + Emme specification for SOLA traffic assignment + + """ + relative_gap = self.config.highway.relative_gap + max_iterations = self.config.highway.max_iterations + # NOTE: mazmazvol as background traffic in link.data1 ("ul1") + base_spec = { + "type": "SOLA_TRAFFIC_ASSIGNMENT", + "background_traffic": { + "link_component": "ul1", + "turn_component": None, + "add_transit_vehicles": False, + }, + "classes": [klass.emme_highway_class_spec for klass in assign_classes], + "stopping_criteria": { + "max_iterations": max_iterations, + "best_relative_gap": 0.0, + "relative_gap": relative_gap, + "normalized_gap": 0.0, + }, + "performance_settings": {"number_of_processors": self._num_processors}, + } + return base_spec + + def _calc_time_skim(self, emme_class_spec: EmmeHighwayClassSpec): + """Calculate the real time skim =gen_cost-per_fac*link_costs. + + Args: + emme_class_spec: dictionary of the per-class spec sub-section from the + Emme SOLA assignment spec, classes list + """ + od_travel_times = emme_class_spec["results"]["od_travel_times"][ + "shortest_paths" + ] + if od_travel_times is not None: + # Total link costs is always the first analysis + cost = emme_class_spec["path_analyses"][0]["results"]["od_values"] + factor = emme_class_spec["generalized_cost"]["perception_factor"] + gencost_data = self._matrix_cache.get_data(od_travel_times) + cost_data = self._matrix_cache.get_data(cost) + time_data = gencost_data - (factor * cost_data) + self._matrix_cache.set_data(od_travel_times, time_data) + + def _set_intrazonal_values( + self, time_period: str, class_name: str, skims: List[str] + ): + """Set the intrazonal values to 1/2 nearest neighbour for time and distance skims. + + Args: + time_period: time period name (from config) + class_name: highway class name (from config) + skims: list of requested skims (from config) + """ + for skim_name in skims: + matrix_name = f"mf{time_period}_{class_name}_{skim_name}" + if skim_name in ["time", "distance", "freeflowtime", "hovdist", "tolldist"]: + data = self._matrix_cache.get_data(matrix_name) + # NOTE: sets values for external zones as well + np.fill_diagonal(data, np.inf) + data[np.diag_indices_from(data)] = 0.5 * np.nanmin(data, 1) + self._matrix_cache.set_data(matrix_name, data) + + def _export_skims(self, scenario: EmmeScenario, time_period: str): + """Export skims to OMX files by period. 
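The _calc_time_skim step above recovers the raw travel time by subtracting the monetized link costs, scaled by the class perception factor, from the generalized-cost skim. A standalone numeric sketch, with an assumed value of time of 18 $/hr and invented skim values:

import numpy as np

# gen_cost (in minutes-equivalent) = time + perception_factor * cost, so
# time = gen_cost - perception_factor * cost, as done in _calc_time_skim.
perception_factor = 0.6 / 18.0           # assumed value_of_time = 18 $/hr
gencost_data = np.array([[25.0, 40.0]])  # generalized cost skim, minutes (invented)
cost_data = np.array([[150.0, 300.0]])   # link costs in $0.01 (invented)
time_data = gencost_data - perception_factor * cost_data
# time_data == [[20., 30.]] minutes of pure travel time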
+ + Args: + scenario: Emme scenario object + time_period: time period name + """ + # NOTE: skims in separate file by period + omx_file_path = self.get_abs_path( + self.config.highway.output_skim_path.format(period=time_period) + ) + os.makedirs(os.path.dirname(omx_file_path), exist_ok=True) + with OMXManager( + omx_file_path, "w", scenario, matrix_cache=self._matrix_cache + ) as omx_file: + omx_file.write_matrices(self._skim_matrices) + + +class AssignmentClass: + """Highway assignment class, represents data from config and conversion to Emme specs""" + + def __init__(self, class_config, time_period, iteration): + self.class_config = class_config + self.time_period = time_period + self.iteration = iteration + self.name = class_config["name"].lower() + self.skims = class_config.get("skims", []) + + @property + def emme_highway_class_spec(self) -> EmmeHighwayClassSpec: + """Construct and return Emme traffic assignment class specification + + Converted from input config (highway.classes), see Emme Help for + SOLA traffic assignment for specification details. + Adds time_period as part of demand and skim matrix names. + + Returns: + A nested dictionary corresponding to the expected Emme traffic + class specification used in the SOLA assignment. + """ + if self.iteration == 0: + demand_matrix = 'ms"zero"' + else: + demand_matrix = f'mf"{self.time_period}_{self.name}"' + class_spec = { + "mode": self.class_config.mode_code, + "demand": demand_matrix, + "generalized_cost": { + "link_costs": f"@cost_{self.name.lower()}", # cost in $0.01 + # $/hr -> min/$0.01 + "perception_factor": 0.6 / self.class_config.value_of_time, + }, + "results": { + "link_volumes": f"@flow_{self.name.lower()}", + "od_travel_times": { + "shortest_paths": f"mf{self.time_period}_{self.name}_time" + }, + }, + "path_analyses": self.emme_class_analysis, + } + return class_spec + + @property + def emme_class_analysis(self) -> List[EmmeHighwayAnalysisSpec]: + """Construct and return a list of path analyses specs which generate the required skims. + + Returns: + A list of nested dictionaries corresponding to the Emme path analysis + (per-class) specification used in the SOLA assignment. + """ + class_analysis = [] + if "time" in self.skims: + class_analysis.append( + self.emme_analysis_spec( + f"@cost_{self.name}".lower(), + f"mf{self.time_period}_{self.name}_cost", + ) + ) + for skim_type in self.skims: + if skim_type == "time": + continue + if "_" in skim_type: + skim_type, group = skim_type.split("_") + else: + group = "" + matrix_name = f"mf{self.time_period}_{self.name}_{skim_type}{group}" + class_analysis.append( + self.emme_analysis_spec( + self.skim_analysis_link_attribute(skim_type, group), + matrix_name, + ) + ) + return class_analysis + + @property + def skim_matrices(self) -> List[str]: + """Returns: List of skim matrix names for this class.""" + skim_matrices = [] + if "time" in self.skims: + skim_matrices.extend( + [ + f"{self.time_period}_{self.name}_time", + f"{self.time_period}_{self.name}_cost", + ] + ) + for skim_type in self.skims: + if skim_type == "time": + continue + if "_" in skim_type: + skim_type, group = skim_type.split("_") + else: + group = "" + skim_matrices.append(f"{self.time_period}_{self.name}_{skim_type}{group}") + return skim_matrices + + @staticmethod + def emme_analysis_spec(link_attr: str, matrix_name: str) -> EmmeHighwayAnalysisSpec: + """Returns Emme highway class path analysis spec. + + See Emme Help for SOLA assignment for full specification details. 
+ Args: + link_attr: input link attribute for which to sum values along the paths + matrix_name: full matrix name to store the result of the path analysis + + Returns: + The nested dictionary specification which will generate the skim + of link attribute values. + """ + analysis_spec = { + "link_component": link_attr, + "turn_component": None, + "operator": "+", + "selection_threshold": {"lower": None, "upper": None}, + "path_to_od_composition": { + "considered_paths": "ALL", + "multiply_path_proportions_by": { + "analyzed_demand": False, + "path_value": True, + }, + }, + "results": { + "od_values": matrix_name, + "selected_link_volumes": None, + "selected_turn_volumes": None, + }, + } + return analysis_spec + + @staticmethod + def skim_analysis_link_attribute(skim: str, group: str) -> str: + """Return the link attribute name for the specified skim type and group. + + Args: + skim: name of skim requested, one of dist, hovdist, tolldist, freeflowtime, + bridgetoll, or valuetoll + group: subgroup name for the bridgetoll or valuetoll, corresponds to one of + the names from config.highway.tolls.dst_vehicle_group_names + Returns: + A string of the link attribute name used in the analysis. + """ + lookup = { + "dist": "length", # NOTE: length must be in miles + "hovdist": "@hov_length", + "tolldist": "@toll_length", + "freeflowtime": "@free_flow_time", + "bridgetoll": f"@bridgetoll_{group}", + "valuetoll": f"@valuetoll_{group}", + } + return lookup[skim] diff --git a/tm2py/components/network/highway/highway_maz.py b/tm2py/components/network/highway/highway_maz.py new file mode 100644 index 00000000..21437d87 --- /dev/null +++ b/tm2py/components/network/highway/highway_maz.py @@ -0,0 +1,791 @@ +"""Assigns and skims MAZ-to-MAZ demand along shortest generalized cost path. + +MAZ to MAZ demand is read in from separate OMX matrices as defined under +the config table highway.maz_to_maz.demand_county_groups, + +The demand is expected to be short distance (e.g. <0.5 miles), or within the +same TAZ. The demand is grouped into bins of origin -> all destinations, by +distance (straight-line) to furthest destination. This limits the size of the +shortest path calculated to the minimum required. +The bin edges have been predefined after testing as (in miles): + [0.0, 0.9, 1.2, 1.8, 2.5, 5.0, 10.0, max_dist] + +Input: +Emme network with: + Link attributes: + - time attribute, either timau (resulting VDF congested time) + or @free_flow_time + Node attributes: @maz_id, x, y, and #node_county +Demand matrices under highway.maz_to_maz.demand_file, +and can have a placeholder + auto_{period}_MAZ_AUTO_{number}_{period}.omx + +Output: +The resulting MAZ-MAZ flows are saved in link @maz_flow which is +used as background traffic in the equilibrium Highway assignment. 
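The module docstring above describes grouping each origin's demand by the straight-line distance to its furthest destination, using the pre-defined bin edges, so that each shortest-path search covers no more of the network than needed. A minimal sketch of that bin selection, with an invented distance:

# Pre-defined bin edges in miles (the last bin is extended to the maximum
# observed distance at runtime).
bin_edges = [0.0, 0.9, 1.2, 1.8, 2.5, 5.0, 10.0]

def pick_bin(furthest_dist_miles: float) -> float:
    """Return the smallest bin edge that covers the furthest destination."""
    for edge in bin_edges[1:]:
        if furthest_dist_miles < edge:
            return edge
    return bin_edges[-1]

# e.g. an origin whose furthest destination (crow-fly) is 1.5 miles away
# falls in the 1.8-mile bin, limiting the shortest-path search radius.
print(pick_bin(1.5))  # 1.8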
+""" + +from __future__ import annotations + +import array as _array +from collections import defaultdict as _defaultdict +from contextlib import contextmanager as _context +from math import sqrt as _sqrt +import os +from typing import Dict, List, Union, BinaryIO, TYPE_CHECKING + +import numpy as np +import pandas as pd + +# from tables import NoSuchNodeError + +from tm2py.components.component import Component +from tm2py.emme.manager import EmmeNode +from tm2py.emme.matrix import OMXManager +from tm2py.emme.network import NetworkCalculator +from tm2py.logger import LogStartEnd +from tm2py.tools import parse_num_processors + +if TYPE_CHECKING: + from tm2py.controller import RunController + +_default_bin_edges = [0.0, 0.9, 1.2, 1.8, 2.5, 5.0, 10.0] +# Using text file format for now, can upgrade to binary format (faster) once +# compatibility with new networks is verified +_USE_BINARY = False +NumpyArray = np.array + + +class AssignMAZSPDemand(Component): + """MAZ-to-MAZ shortest-path highway assignment. + + Calculates shortest path between MAZs with demand in the Emme network + and assigns flow. + """ + + # skip Too many instance attributes recommendation, it is OK as is + # pylint: disable=R0902 + + def __init__(self, controller: RunController): + """MAZ-to-MAZ shortest-path highway assignment. + + Args: + controller: parent Controller object + """ + super().__init__(controller) + self._scenario = None + # bins: performance parameter: crow-fly distance bins + # to limit shortest path calculation by origin to furthest destination + # semi-exposed for performance testing + self._bin_edges = _default_bin_edges + self._debug = False + + # Internal attributes to track data through the sequence of steps + self._eb_dir = None + self._mazs = None + self._demand = None + self._max_dist = 0 + self._network = None + self._root_index = None + self._leaf_index = None + + @LogStartEnd() + def run(self): + """Run MAZ-to-MAZ shortest path assignment.""" + emme_manager = self.controller.emme_manager + emmebank = emme_manager.emmebank( + self.get_abs_path(self.config.emme.highway_database_path) + ) + self._eb_dir = os.path.dirname(emmebank.path) + county_groups = {} + for group in self.config.highway.maz_to_maz.demand_county_groups: + county_groups[group.number] = group.counties + for time in self.time_period_names(): + with self.logger.log_start_end(f"period {time}"): + self._scenario = self.get_emme_scenario(emmebank.path, time) + with self._setup(time): + self._prepare_network() + for i, names in county_groups.items(): + maz_ids = self._get_county_mazs(names) + if len(maz_ids) == 0: + self.logger.log( + f"warning: no mazs for counties {', '.join(names)}" + ) + continue + self._process_demand(time, i, maz_ids) + demand_bins = self._group_demand() + for i, demand_group in enumerate(demand_bins): + self._find_roots_and_leaves(demand_group["demand"]) + self._set_link_cost_maz() + self._run_shortest_path(time, i, demand_group["dist"]) + self._assign_flow(time, i, demand_group["demand"]) + + @_context + def _setup(self, time: str): + """Context setup / teardown, initializes internal attributes. 
+
+        Args:
+            time: name of the time period
+        """
+        self._mazs = None
+        self._demand = _defaultdict(lambda: [])
+        self._max_dist = 0
+        self._network = None
+        self._root_index = None
+        self._leaf_index = None
+        attributes = [
+            ("LINK", "@link_cost", "total cost MAZ-MAZ"),
+            ("LINK", "@link_cost_maz", "cost MAZ-MAZ, unused MAZs blocked"),
+            ("NODE", "@maz_root", "Flag for MAZs which are roots"),
+            ("NODE", "@maz_leaf", "Flag for MAZs which are leaves"),
+        ]
+        with self.controller.emme_manager.temp_attributes_and_restore(
+            self._scenario, attributes
+        ):
+            try:
+                yield
+            finally:
+                if not self._debug:
+                    self._mazs = None
+                    self._demand = None
+                    self._network = None
+                    self._root_index = None
+                    self._leaf_index = None
+                    # delete sp path files
+                    for bin_no in range(len(self._bin_edges)):
+                        file_path = os.path.join(
+                            self._eb_dir, f"sp_{time}_{bin_no}.ebp"
+                        )
+                        if os.path.exists(file_path):
+                            os.remove(file_path)
+
+    def _prepare_network(self):
+        """Calculate link cost (travel time + bridge tolls + operating cost) and load network.
+
+        Reads Emme network from disk for later node lookups. Optimized to only load
+        attribute values of interest; additional attributes must be added in
+        order to be read from disk.
+        """
+        if self._scenario.has_traffic_results:
+            time_attr = "(@free_flow_time.max.timau)"
+        else:
+            time_attr = "@free_flow_time"
+        self.logger.log(f"Calculating link costs using time {time_attr}", level="DEBUG")
+        vot = self.config.highway.maz_to_maz.value_of_time
+        op_cost = self.config.highway.maz_to_maz.operating_cost_per_mile
+        net_calc = NetworkCalculator(self._scenario)
+        net_calc("@link_cost", f"{time_attr} + 0.6 / {vot} * (length * {op_cost})")
+        self._network = self.controller.emme_manager.get_network(
+            self._scenario, {"NODE": ["@maz_id", "x", "y", "#node_county"], "LINK": []}
+        )
+        self._network.create_attribute("LINK", "temp_flow")
+
+    def _get_county_mazs(self, counties: List[str]) -> List[EmmeNode]:
+        """Get all MAZ nodes which are located in one of these counties.
+
+        Uses the node attribute #node_county to identify the node location.
+        Name must be an exact match. Caches a mapping of the county names
+        to nodes so nodes are processed only once.
+
+        Args:
+            counties: list of county names
+
+        Returns:
+            List of MAZ nodes (Emme Node) which are in these counties.
+        """
+        network = self._network
+        # NOTE: every maz must have a valid #node_county
+        if self._mazs is None:
+            self._mazs = _defaultdict(lambda: [])
+            for node in network.nodes():
+                if node["@maz_id"]:
+                    self._mazs[node["#node_county"]].append(node)
+        mazs = []
+        for county in counties:
+            mazs.extend(self._mazs[county])
+        return sorted(mazs, key=lambda n: n["@maz_id"])
+
+    def _process_demand(self, time: str, index: int, maz_ids: List[EmmeNode]):
+        """Loads the demand from file and groups by origin node.
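The network calculator expression used in `_prepare_network` converts the auto operating cost into equivalent minutes: with value of time in dollars per hour and operating cost in cents per mile, the factor `0.6 / vot` is 60 (minutes per hour) divided by 100 (cents per dollar). A small stand-alone check of the arithmetic, with illustrative values not taken from any config:

```python
# Stand-alone check of the generalized cost expression used above:
# cost [min] = time [min] + 0.6 / VOT [$/hr] * (length [mi] * op_cost [cents/mi])
def link_cost_minutes(time_min, length_miles, vot_dollars_per_hr, op_cost_cents_per_mile):
    cents = length_miles * op_cost_cents_per_mile   # monetary cost of traversing the link
    minutes_per_cent = 0.6 / vot_dollars_per_hr     # 60 min/hr divided by 100 cents/$
    return time_min + minutes_per_cent * cents


# 2.0 min link, 0.5 mi long, VOT = $18/hr, operating cost = 17.23 cents/mi
print(round(link_cost_minutes(2.0, 0.5, 18.0, 17.23), 3))  # -> 2.287
```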
+ + Sets the demand to self._demand for later processing, grouping the demand in + a dictionary by origin node (Emme Node object) to list of dictionaries + {"orig": orig_node, "dest": dest_node, "dem": demand, "dist": dist} + + Args: + time: time period name + index: group index of the demand file, used to find the file by name + maz_ids: indexed list of MAZ ID nodes for the county group + (active counties for this demand file) + """ + data = self._read_demand_array(time, index) + origins, destinations = data.nonzero() + for orig, dest in zip(origins, destinations): + # skip intra-maz demand + if orig == dest: + continue + orig_node = maz_ids[orig] + dest_node = maz_ids[dest] + dist = _sqrt( + (dest_node.x - orig_node.x) ** 2 + (dest_node.y - orig_node.y) ** 2 + ) + if dist > self._max_dist: + self._max_dist = dist + self._demand[orig_node].append( + { + "orig": orig_node, + "dest": dest_node, + "dem": data[orig][dest], + "dist": dist, + } + ) + + def _read_demand_array(self, time: str, index: int) -> NumpyArray: + """Load the demand from file with the specified time and index name. + + Args: + time: time period name + index: group index of the demand file, used to find the file by name + """ + file_path_tmplt = self.get_abs_path(self.config.highway.maz_to_maz.demand_file) + omx_file_path = self.get_abs_path( + file_path_tmplt.format(period=time, number=index) + ) + with OMXManager(omx_file_path, "r") as omx_file: + demand_array = omx_file.read("M0") + return demand_array + + def _group_demand( + self, + ) -> List[Dict[str, Union[float, List[Dict[str, Union[float, EmmeNode]]]]]]: + """Process the demand loaded from files and create groups based on the + origin to the furthest destination with demand. + + Returns: + List of dictionaries, containing the demand in the format + {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)} + + """ + # group demand from same origin into distance bins by furthest + # distance destination to limit shortest path search radius + bin_edges = self._bin_edges[:] + if bin_edges[-1] < self._max_dist / 5280.0: + bin_edges.append(self._max_dist / 5280.0) + + demand_groups = [ + {"dist": edge, "demand": []} for i, edge in enumerate(bin_edges[1:]) + ] + for data in self._demand.values(): + max_dist = max(entry["dist"] for entry in data) / 5280.0 + for group in demand_groups: + if max_dist < group["dist"]: + group["demand"].extend(data) + break + for group in demand_groups: + self.logger.log_time( + f"bin dist {group['dist']}, size {len(group['demand'])}", level="DEBUG" + ) + # Filter out groups without any demand + demand_groups = [group for group in demand_groups if group["demand"]] + return demand_groups + + def _find_roots_and_leaves(self, demand: List[Dict[str, Union[float, EmmeNode]]]): + """Label available MAZ root nodes and leaf nodes for the path calculation. + + The MAZ nodes which are found as origins in the demand are "activated" + by setting @maz_root to non-zero, and similarly the leaves have @maz_leaf + set to non-zero. 
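`_process_demand` and `_group_demand` above scan the non-zero cells of each demand matrix and track the straight-line distance in network coordinate units (feet). The following is a minimal numpy-only sketch of that scan; the toy matrix and coordinates are invented for illustration.

```python
# Minimal numpy-only sketch of the non-zero O-D scan in _process_demand.
from math import sqrt

import numpy as np

demand = np.array([[0.0, 2.0], [1.5, 0.0]])  # toy 2-MAZ demand matrix
xy = [(0.0, 0.0), (3000.0, 4000.0)]          # node coordinates in feet (illustrative)

max_dist = 0.0
for orig, dest in zip(*demand.nonzero()):
    if orig == dest:
        continue  # intra-MAZ demand is skipped
    (x_1, y_1), (x_2, y_2) = xy[orig], xy[dest]
    dist = sqrt((x_2 - x_1) ** 2 + (y_2 - y_1) ** 2)
    max_dist = max(max_dist, dist)

print(round(max_dist / 5280.0, 3))  # crow-fly distance in miles -> 0.947
```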
+ + Args: + demand: list of dictionaries, containing the demand in the format + {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)} + """ + network = self._network + attrs_to_init = [("NODE", ["@maz_root", "@maz_leaf"]), ("LINK", ["maz_cost"])] + for domain, attrs in attrs_to_init: + for name in attrs: + if name in network.attributes(domain): + network.delete_attribute(domain, name) + network.create_attribute(domain, name) + root_maz_ids = {} + leaf_maz_ids = {} + for data in demand: + o_node, d_node = data["orig"], data["dest"] + root_maz_ids[o_node.number] = o_node["@maz_root"] = o_node["@maz_id"] + leaf_maz_ids[d_node.number] = d_node["@maz_leaf"] = d_node["@maz_id"] + self._root_index = {p: i for i, p in enumerate(sorted(root_maz_ids.keys()))} + self._leaf_index = {q: i for i, q in enumerate(sorted(leaf_maz_ids.keys()))} + self.controller.emme_manager.copy_attr_values( + "NODE", self._network, self._scenario, ["@maz_root", "@maz_leaf"] + ) + + def _set_link_cost_maz(self): + """Set link cost used in the shortest path forbidden using unavailable connectors. + + Copy the pre-calculated cost @link_cost to @link_cost_maz, + setting value to 1e20 on connectors to unused zone leaves / from + unused roots. + """ + # forbid egress from MAZ nodes which are not demand roots / + # access to MAZ nodes which are not demand leafs + net_calc = NetworkCalculator(self._scenario) + net_calc.add_calc("@link_cost_maz", "@link_cost") + net_calc.add_calc("@link_cost_maz", "1e20", "@maz_root=0 and !@maz_id=0") + net_calc.add_calc("@link_cost_maz", "1e20", "@maz_leafj=0 and !@maz_idj=0") + net_calc.run() + + def _run_shortest_path(self, time: str, bin_no: int, max_radius: float): + """Run the shortest path tool to generate paths between the marked nodes. + + Args: + time: time period name + bin_no: bin number (id) for this demand segment + max_radius: max unit coordinate distance to limit search tree + """ + shortest_paths_tool = self.controller.emme_manager.tool( + "inro.emme.network_calculation.shortest_path" + ) + max_radius = max_radius * 5280 + 100 # add some buffer for rounding error + ext = "ebp" if _USE_BINARY else "txt" + file_name = f"sp_{time}_{bin_no}.{ext}" + num_processors = parse_num_processors(self.config.emme.num_processors) + spec = { + "type": "SHORTEST_PATH", + "modes": [self.config.highway.maz_to_maz.mode_code], + "root_nodes": "@maz_root", + "leaf_nodes": "@maz_leaf", + "link_cost": "@link_cost_maz", + "path_constraints": { + "max_radius": max_radius, + "uturn_allowed": False, + "through_leaves": False, + "through_centroids": False, + "exclude_forbidden_turns": False, + }, + "results": { + "skim_output": { + "file": "", + "format": "TEXT", + "return_numpy": False, + "analyses": [], + }, + "path_output": { + "format": "BINARY" if _USE_BINARY else "TEXT", + "file": os.path.join(self._eb_dir, file_name), + }, + }, + "performance_settings": { + "number_of_processors": num_processors, + "direction": "FORWARD", + "method": "STANDARD", + }, + } + shortest_paths_tool(spec, self._scenario) + + def _assign_flow( + self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]] + ): + """Assign the demand along the paths generated from the shortest path tool. 
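The `_root_index` and `_leaf_index` dictionaries built in `_find_roots_and_leaves` simply map sorted node numbers to 0-based positions, which the binary path reader later combines as `p_index * leaves_nb + q_index`. A toy illustration with invented node numbers:

```python
# Toy illustration of the root/leaf position maps built in _find_roots_and_leaves.
root_node_numbers = [2050, 1010, 3300]  # origins with demand (invented IDs)
leaf_node_numbers = [1010, 4400]        # destinations with demand (invented IDs)

root_index = {p: i for i, p in enumerate(sorted(root_node_numbers))}
leaf_index = {q: i for i, q in enumerate(sorted(leaf_node_numbers))}

print(root_index)  # {1010: 0, 2050: 1, 3300: 2}
print(leaf_index)  # {1010: 0, 4400: 1}
```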
+ + Args: + time: time period name + bin_no: bin number (id) for this demand segment + demand: list of dictionaries, containing the demand in the format + {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)} + """ + if _USE_BINARY: + self._assign_flow_binary(time, bin_no, demand) + else: + self._assign_flow_text(time, bin_no, demand) + + def _assign_flow_text( + self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]] + ): + """Assign the demand along the paths generated from the shortest path tool. + + The paths are read from a text format file, see Emme help for details. + Demand is summed in self._network (in memory) using temp_flow attribute + and written to scenario (Emmebank / disk) @maz_flow. + + Args: + time: time period name + bin_no: bin number (id) for this demand segment + demand: list of dictionaries, containin the demand in the format + {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)} + """ + paths = self._load_text_format_paths(time, bin_no) + not_assigned, assigned = 0, 0 + for data in demand: + orig, dest, dem = data["orig"].number, data["dest"].number, data["dem"] + path = paths.get(orig, {}).get(dest) + if path is None: + not_assigned += dem + continue + i_node = orig + for j_node in path: + link = self._network.link(i_node, j_node) + link["temp_flow"] += dem + i_node = j_node + assigned += dem + self.logger.log_time( + f"ASSIGN bin {bin_no}: total: {len(demand)}", level="DEBUG" + ) + self.logger.log_time( + f"assigned: {assigned}, not assigned: {not_assigned}", level="DEBUG" + ) + + def _load_text_format_paths( + self, time: str, bin_no: int + ) -> Dict[int, Dict[int, List[int]]]: + """Load all paths from text file and return as nested dictionary. + + Args: + time: time period name + bin_no: bin number (id) for this demand segment + + Returns: + All paths as a nested dictionary, path = paths[origin][destination], + using the node IDs as integers. + """ + paths = _defaultdict(lambda: {}) + with open( + os.path.join(self._eb_dir, f"sp_{time}_{bin_no}.txt"), + "r", + encoding="utf8", + ) as paths_file: + for line in paths_file: + nodes = [int(x) for x in line.split()] + paths[nodes[0]][nodes[-1]] = nodes[1:] + return paths + + def _assign_flow_binary( + self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]] + ): + """Assign the demand along the paths generated from the shortest path tool. + + The paths are read from a binary format file, see Emme help for details. + Demand is summed in self._network (in memory) using temp_flow attribute + and written to scenario (Emmebank / disk) @maz_flow. 
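The text path format handled by `_load_text_format_paths` and `_assign_flow_text` is one line per O-D pair: origin node, intermediate nodes, destination node. The sketch below reproduces the parse-then-accumulate pattern with plain dictionaries and no Emme network; all numbers are invented.

```python
# Plain-Python sketch of the text path parse and flow accumulation described above.
from collections import defaultdict

lines = ["101 5 6 202", "101 5 7 203"]      # toy shortest-path output lines
paths = defaultdict(dict)
for line in lines:
    nodes = [int(x) for x in line.split()]
    paths[nodes[0]][nodes[-1]] = nodes[1:]  # keyed by origin, then destination

link_flow = defaultdict(float)
demand = [{"orig": 101, "dest": 202, "dem": 3.0}]
for row in demand:
    path = paths.get(row["orig"], {}).get(row["dest"])
    if path is None:
        continue                            # no path found: demand is not assigned
    i_node = row["orig"]
    for j_node in path:
        link_flow[(i_node, j_node)] += row["dem"]
        i_node = j_node

print(dict(link_flow))  # {(101, 5): 3.0, (5, 6): 3.0, (6, 202): 3.0}
```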
+ + Args: + time: time period name + bin_no: bin number (id) for this demand segment + demand: list of dictionaries, containin the demand in the format + {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)} + """ + file_name = f"sp_{time}_{bin_no}.ebp" + with open(os.path.join(self._eb_dir, file_name), "rb") as paths_file: + # read set of path pointers by Orig-Dest sequence from file + offset, leaves_nb, path_indicies = self._get_path_indices(paths_file) + assigned = 0 + not_assigned = 0 + bytes_read = offset * 8 + # for all orig-dest pairs with demand, load path from file + for data in demand: + # get file position based on orig-dest index + start, end = self._get_path_location( + data["orig"].number, data["dest"].number, leaves_nb, path_indicies + ) + # no path found, disconnected zone + if start == end: + not_assigned += data["dem"] + continue + paths_file.seek(start * 4 + offset * 8) + self._assign_path_flow(paths_file, start, end, data["dem"]) + assigned += data["dem"] + bytes_read += (end - start) * 4 + self.controller.emme_manager.copy_attr_values( + "LINK", self._network, self._scenario, ["temp_flow"], ["@maz_flow"] + ) + self.logger.log_time( + f"ASSIGN bin {bin_no}, total {len(demand)}, assign " + f"{assigned}, not assign {not_assigned}, bytes {bytes_read}", + level="DEBUG", + ) + + @staticmethod + def _get_path_indices(paths_file: BinaryIO) -> [int, int, _array.array]: + """Get the path header indices. + + See the Emme Shortest path tool doc for additional details on reading + this file. + + Args: + paths_file: binary file access to the generated paths file + + Returns: + 2 ints + array of ints: offset, leafs_nb, path_indicies + offset: starting index to read the paths + leafs_nb: number of leafs in the shortest path file + path_indicies: array of the start index for each root, leaf path in paths_file. + """ + # read first 4 integers from file (Q=64-bit unsigned integers) + header = _array.array("Q") + header.fromfile(paths_file, 4) + roots_nb, leaves_nb = header[2:4] + # Load sequence of path indices (positions by orig-dest index), + # pointing to list of path node IDs in file + path_indicies = _array.array("Q") + path_indicies.fromfile(paths_file, roots_nb * leaves_nb + 1) + offset = roots_nb * leaves_nb + 1 + 4 + return offset, leaves_nb, path_indicies + + def _get_path_location( + self, + orig: EmmeNode, + dest: EmmeNode, + leaves_nb: int, + path_indicies: _array.array, + ) -> [int, int]: + """Get the location in the paths_file to read. + + Args: + orig: Emme Node object, origin MAZ to query the path + dest: Emme Node object, destination MAZ to query the path + leaves_nb: number of leaves + path_indicies: array of the start index for each root, leaf path in paths_file. + + Returns: + Two integers, start, end + start: starting index to read Node ID bytes from paths_file + end: ending index to read bytes from paths_file + """ + p_index = self._root_index[orig] + q_index = self._leaf_index[dest] + index = p_index * leaves_nb + q_index + start = path_indicies[index] + end = path_indicies[index + 1] + return start, end + + def _assign_path_flow( + self, paths_file: BinaryIO, start: int, end: int, demand: float + ): + """Add demand to link temp_flow for the path. 
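The byte arithmetic in `_get_path_indices` and `_get_path_location` can be checked with a small worked example: the header and index table are 8-byte integers, the path node IDs are 4-byte integers, and a root-leaf pair with `start == end` has no path. All numbers below are invented.

```python
# Worked example (invented numbers) of the binary path-file index arithmetic.
roots_nb, leaves_nb = 3, 4
offset = roots_nb * leaves_nb + 1 + 4        # 8-byte words before the node-ID data

# start positions for each (root, leaf) pair; length is roots_nb * leaves_nb + 1
path_indices = [0, 3, 3, 5, 8, 8, 8, 8, 8, 10, 10, 10, 12]

p_index, q_index = 0, 2                      # first root, third leaf
pair = p_index * leaves_nb + q_index
start, end = path_indices[pair], path_indices[pair + 1]
seek_bytes = start * 4 + offset * 8          # node IDs are 4-byte integers

print(pair, start, end, seek_bytes)          # 2 3 5 148 -> read end - start = 2 node IDs
# note: path_indices[1] == path_indices[2], so the (root 0, leaf 1) pair has no path
```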
+ + Args: + paths_file: binary file access to read path from + start: starting index to read Node ID bytes from paths_file + end: ending index to read bytes from paths_file + demand: flow demand to add on link + """ + # load sequence of Node IDs which define the path (L=32-bit unsigned integers) + path = _array.array("L") + path.fromfile(paths_file, end - start) + # proccess path to sequence of links and add flow + path_iter = iter(path) + i_node = next(path_iter) + for j_node in path_iter: + link = self._network.link(i_node, j_node) + link["temp_flow"] += demand + i_node = j_node + + +class SkimMAZCosts(Component): + """MAZ-to-MAZ shortest-path skim of time, distance and toll""" + + def __init__(self, controller: RunController): + """MAZ-to-MAZ shortest-path skim of time, distance and toll + Args: + controller: parent RunController object + """ + super().__init__(controller) + self._scenario = None + self._network = None + + @LogStartEnd() + def run(self): + """Run shortest path skims for all available MAZ-to-MAZ O-D pairs. + + Runs a shortest path builder for each county, using a maz_skim_cost + to limit the search. The valid gen cost (time + cost), distance and toll (drive alone) + are written to CSV at the output_skim_file path: + FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL + + The following config inputs are used directly in this component. Note also + that the network mode_code is prepared in the highway_network component + using the excluded_links. + + config.highway.maz_to_maz: + skim_period: name of the period used for the skim, must match one the + defined config.time_periods + demand_county_groups: used for the list of counties, creates a list out + of all listed counties under [].counties + output_skim_file: relative path to save the skims + value_of_time: value of time used to convert tolls and auto operating cost + operating_cost_per_mile: auto operating cost + max_skim_cost: max cost value used to limit the shortest path search + mode_code: + + config.emme.num_processors + + + """ + ref_period = None + ref_period_name = self.config.highway.maz_to_maz.skim_period + for period in self.config.time_periods: + if period.name == ref_period_name: + ref_period = period + break + if ref_period is None: + raise Exception( + "highway.maz_to_maz.skim_period: is not the name of an existing time_period" + ) + self._scenario = self.get_emme_scenario( + self.config.emme.highway_database_path, ref_period.name + ) + # prepare output file and write header + output = self.get_abs_path(self.config.highway.maz_to_maz.output_skim_file) + os.makedirs(os.path.dirname(output), exist_ok=True) + with open(output, "w", encoding="utf8") as output_file: + output_file.write("FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL\n") + counties = [] + for group in self.config.highway.maz_to_maz.demand_county_groups: + counties.extend(group.counties) + with self._setup(): + self._prepare_network() + for county in counties: + num_roots = self._mark_roots(county) + if num_roots == 0: + continue + sp_values = self._run_shortest_path() + self._export_results(sp_values) + + @_context + def _setup(self): + """Creates the temp attributes used in the component.""" + attributes = [ + ("LINK", "@link_cost", "total cost MAZ-MAZ"), + ("NODE", "@maz_root", "selected roots (origins)"), + ] + with self.controller.emme_manager.temp_attributes_and_restore( + self._scenario, attributes + ): + try: + yield + finally: + self._network = None # clear network obj ref to free memory + + @LogStartEnd() + def _prepare_network(self): + 
"""Calculates the link cost in @link_cost and loads the network to self._network""" + net_calc = NetworkCalculator(self._scenario) + if self._scenario.has_traffic_results: + time_attr = "(@free_flow_time.max.timau)" + else: + time_attr = "@free_flow_time" + vot = self.config.highway.maz_to_maz.value_of_time + op_cost = self.config.highway.maz_to_maz.operating_cost_per_mile + net_calc("@link_cost", f"{time_attr} + 0.6 / {vot} * (length * {op_cost})") + self._network = self.controller.emme_manager.get_network( + self._scenario, {"NODE": ["@maz_id", "#node_county"]} + ) + + def _mark_roots(self, county: str) -> int: + """Mark the available roots in the county.""" + count_roots = 0 + for node in self._network.nodes(): + if node["@maz_id"] > 0 and node["#node_county"] == county: + node["@maz_root"] = node["@maz_id"] + count_roots += 1 + else: + node["@maz_root"] = 0 + values = self._network.get_attribute_values("NODE", ["@maz_root"]) + self._scenario.set_attribute_values("NODE", ["@maz_root"], values) + return count_roots + + def _run_shortest_path(self) -> Dict[str, NumpyArray]: + """Run shortest paths tool and return dictionary of skim results name, numpy arrays. + + O-D pairs are limited by a max cost value from config.highway.maz_to_maz.max_skim_cost, + from roots marked by @maz_root to all available leaves at @maz_id. + + Returns: + A dictionary with keys "COST", "DISTANCE", and "BRIDGETOLL", and numpy + arrays of SP values for available O-D pairs + """ + shortest_paths_tool = self.controller.emme_manager.tool( + "inro.emme.network_calculation.shortest_path" + ) + num_processors = parse_num_processors(self.config.emme.num_processors) + max_cost = float(self.config.highway.maz_to_maz.max_skim_cost) + spec = { + "type": "SHORTEST_PATH", + "modes": [self.config.highway.maz_to_maz.mode_code], + "root_nodes": "@maz_root", + "leaf_nodes": "@maz_id", + "link_cost": "@link_cost", + "path_constraints": { + "max_cost": max_cost, + "uturn_allowed": False, + "through_leaves": False, + "through_centroids": False, + "exclude_forbidden_turns": False, + }, + "results": { + "skim_output": { + "return_numpy": True, + "analyses": [ + { + "component": "SHORTEST_PATH_COST", + "operator": "+", + "name": "COST", + "description": "", + }, + { + "component": "length", + "operator": "+", + "name": "DISTANCE", + "description": "", + }, + { + "component": "@bridgetoll_da", + "operator": "+", + "name": "BRIDGETOLL", + "description": "", + }, + ], + "format": "OMX", + } + }, + "performance_settings": { + "number_of_processors": num_processors, + "direction": "FORWARD", + "method": "STANDARD", + }, + } + sp_values = shortest_paths_tool(spec, self._scenario) + return sp_values + + def _export_results(self, sp_values: Dict[str, NumpyArray]): + """Write matrix skims to CSV. + + The matrices are filtered to omit rows for which the COST is + < 0 or > 1e19 (Emme uses 1e20 to indicate inaccessible zone pairs). 
+ + sp_values: dictionary of matrix costs, with the three keys + "COST", "DISTANCE", and "BRIDGETOLL" and Numpy arrays of values + """ + # get list of MAZ IDS + roots = [ + node["@maz_root"] for node in self._network.nodes() if node["@maz_root"] + ] + leaves = [node["@maz_id"] for node in self._network.nodes() if node["@maz_id"]] + # build dataframe with output data and to/from MAZ ids + root_ids = np.repeat(roots, len(leaves)) + leaf_ids = leaves * len(roots) + result_df = pd.DataFrame( + { + "FROM_ZONE": root_ids, + "TO_ZONE": leaf_ids, + "COST": sp_values["COST"].flatten(), + "DISTANCE": sp_values["DISTANCE"].flatten(), + "BRIDGETOLL": sp_values["BRIDGETOLL"].flatten(), + } + ) + # drop 0's / 1e20 + result_df = result_df.query("COST > 0 & COST < 1e19") + # write remaining values to text file + # FROM_ZONE,TO_ZONE,COST,DISTANCE,BRIDGETOLL + output = self.get_abs_path(self.config.highway.maz_to_maz.output_skim_file) + with open(output, "a", newline="", encoding="utf8") as output_file: + result_df.to_csv(output_file, header=False, index=False) diff --git a/tm2py/components/network/highway/highway_network.py b/tm2py/components/network/highway/highway_network.py new file mode 100644 index 00000000..ae1cfec7 --- /dev/null +++ b/tm2py/components/network/highway/highway_network.py @@ -0,0 +1,313 @@ +"""Module for highway network preparation steps. + +Creates required attributes and populates input values needed +for highway assignments. The toll values, VDFs, per-class cost +(tolls+operating costs), modes and skim link attributes are calculated. + +The following link attributes are used as input: + - "@capclass": link capclass index + - "length": standard link length, in miles + - "@tollbooth": label to separate bridgetolls from valuetolls + - "@tollseg": toll segment, used to index toll value lookups from the toll file + (under config.highway.tolls.file_path) + - "@ft": functional class, used to assign VDFs + +The following keys and tables are used from the config: + highway.tolls.file_path: relative path to input toll file + highway.tolls.src_vehicle_group_names: names used in tolls file for + toll class values + highway.tolls.dst_vehicle_group_names: corresponding names used in + network attributes toll classes + highway.tolls.tollbooth_start_index: index to split point bridge tolls + (< this value) from distance value tolls (>= this value) + highway.classes: the list of assignment classes, see the notes under + highway_assign for detailed explanation + highway.capclass_lookup: the lookup table mapping the link @capclass setting + to capacity (@capacity), free_flow_speed (@free_flow_speec) and + critical_speed (used to calculate @ja for akcelik type functions) + highway.generic_highway_mode_code: unique (with other mode_codes) single + character used to label entire auto network in Emme + highway.maz_to_maz.mode_code: unique (with other mode_codes) single + character used to label MAZ local auto network including connectors + +The following link attributes are created (overwritten) and are subsequently used in +the highway assignments. 
+ - "@flow_XX": link PCE flows per class, where XX is the class name in the config + - "@maz_flow": Assigned MAZ-to-MAZ flow + +The following attributes are calculated: + - vdf: volume delay function to use + - "@capacity": total link capacity + - "@ja": akcelik delay parameter + - "@hov_length": length with HOV lanes + - "@toll_length": length with tolls + - "@bridgetoll_YY": the bridge toll for class subgroup YY + - "@valuetoll_YY": the "value", non-bridge toll for class subgroup YY + - "@cost_YY": total cost for class YY +""" + + +from typing import Dict, List, Set + +from tm2py.components.component import Component +from tm2py.logger import LogStartEnd +from tm2py.emme.manager import EmmeScenario, EmmeNetwork + + +class PrepareNetwork(Component): + """Highway network preparation""" + + @LogStartEnd("prepare network attributes and modes") + def run(self): + """Run network preparation step""" + for time in self.time_period_names(): + with self.controller.emme_manager.logbook_trace( + f"prepare for highway assignment {time}" + ): + scenario = self.get_emme_scenario( + self.config.emme.highway_database_path, time + ) + self._create_class_attributes(scenario, time) + network = scenario.get_network() + self._set_tolls(network, time) + self._set_vdf_attributes(network, time) + self._set_link_modes(network) + self._calc_link_skim_lengths(network) + self._calc_link_class_costs(network) + scenario.publish_network(network) + + def _create_class_attributes(self, scenario: EmmeScenario, time_period: str): + """Create required network attributes including per-class cost and flow attributes.""" + create_attribute = self.controller.emme_manager.tool( + "inro.emme.data.extra_attribute.create_extra_attribute" + ) + attributes = { + "LINK": [ + ("@capacity", "total link capacity"), + ("@ja", "akcelik delay parameter"), + ("@maz_flow", "Assigned MAZ-to-MAZ flow"), + ("@hov_length", "length with HOV lanes"), + ("@toll_length", "length with tolls"), + ] + } + # toll field attributes by bridge and value and toll definition + dst_veh_groups = self.config.highway.tolls.dst_vehicle_group_names + for dst_veh in dst_veh_groups: + for toll_type in "bridge", "value": + attributes["LINK"].append( + ( + f"@{toll_type}toll_{dst_veh}", + f"{toll_type} toll value for {dst_veh}", + ) + ) + # results for link cost and assigned flow + for assign_class in self.config.highway.classes: + attributes["LINK"].append( + ( + f"@cost_{assign_class.name.lower()}", + f'{time_period} {assign_class["description"]} total costs'[:40], + ) + ) + attributes["LINK"].append( + ( + f"@flow_{assign_class.name.lower()}", + f'{time_period} {assign_class["description"]} link volume'[:40], + ) + ) + for domain, attrs in attributes.items(): + for name, desc in attrs: + create_attribute(domain, name, desc, overwrite=True, scenario=scenario) + + def _set_tolls(self, network: EmmeNetwork, time_period: str): + """Set the tolls in the network from the toll reference file.""" + toll_index = self._get_toll_indices() + src_veh_groups = self.config.highway.tolls.src_vehicle_group_names + dst_veh_groups = self.config.highway.tolls.dst_vehicle_group_names + tollbooth_start_index = self.config.highway.tolls.tollbooth_start_index + for link in network.links(): + if link["@tollbooth"]: + index = ( + link["@tollbooth"] * 1000 + + link["@tollseg"] * 10 + + link["@useclass"] + ) + data_row = toll_index.get(index) + if data_row is None: + self.logger.log( + f"set tolls failed index lookup {index}, link {link.id}", + level="TRACE", + ) + continue # tolls will remain 
at zero + # if index is below tollbooth start index then this is a bridge + # (point toll), available for all traffic assignment classes + if link["@tollbooth"] < tollbooth_start_index: + for src_veh, dst_veh in zip(src_veh_groups, dst_veh_groups): + link[f"@bridgetoll_{dst_veh}"] = ( + data_row[f"toll{time_period.lower()}_{src_veh}"] * 100 + ) + else: # else, this is a tollway with a per-mile charge + for src_veh, dst_veh in zip(src_veh_groups, dst_veh_groups): + link[f"@valuetoll_{dst_veh}"] = ( + data_row[f"toll{time_period.lower()}_{src_veh}"] + * link.length + * 100 + ) + + def _get_toll_indices(self) -> Dict[int, Dict[str, str]]: + """Get the mapping of toll lookup table from the toll reference file.""" + toll_file_path = self.get_abs_path(self.config.highway.tolls.file_path) + tolls = {} + with open(toll_file_path, "r", encoding="UTF8") as toll_file: + header = next(toll_file).split(",") + for line in toll_file: + data = dict(zip(header, line.split(","))) + tolls[int(data["fac_index"])] = data + return tolls + + def _set_vdf_attributes(self, network: EmmeNetwork, time_period: str): + """Set capacity, VDF and critical speed on links""" + capacity_map = {} + critical_speed_map = {} + for row in self.config.highway.capclass_lookup: + if row.get("capacity") is not None: + capacity_map[row["capclass"]] = row.get("capacity") + if row.get("critical_speed") is not None: + critical_speed_map[row["capclass"]] = row.get("critical_speed") + tp_mapping = { + tp.name: tp.highway_capacity_factor for tp in self.config.time_periods + } + period_capacity_factor = tp_mapping[time_period] + akcelik_vdfs = [3, 4, 5, 7, 8, 10, 11, 12, 13, 14] + for link in network.links(): + cap_lanehour = capacity_map[link["@capclass"]] + link["@capacity"] = cap_lanehour * period_capacity_factor * link["@lanes"] + link.volume_delay_func = int(link["@ft"]) + # re-mapping links with type 99 to type 7 "local road of minor importance" + if link.volume_delay_func == 99: + link.volume_delay_func = 7 + # num_lanes not used directly, but set for reference + link.num_lanes = max(min(9.9, link["@lanes"]), 1.0) + if link.volume_delay_func in akcelik_vdfs and link["@free_flow_speed"] > 0: + dist = link.length + critical_speed = critical_speed_map[link["@capclass"]] + t_c = dist / critical_speed + t_o = dist / link["@free_flow_speed"] + link["@ja"] = 16 * (t_c - t_o) ** 2 + + def _set_link_modes(self, network: EmmeNetwork): + """Set the link modes based on the per-class 'excluded_links' set.""" + # first reset link modes (script run more than once) + # "generic_highway_mode_code" must already be created (in import to Emme script) + auto_mode = {network.mode(self.config.highway.generic_highway_mode_code)} + used_modes = { + network.mode(assign_class.mode_code) + for assign_class in self.config.highway.classes + } + used_modes.add(network.mode(self.config.highway.maz_to_maz.mode_code)) + for link in network.links(): + link.modes -= used_modes + if link["@drive_link"]: + link.modes |= auto_mode + for mode in used_modes: + if mode is not None: + network.delete_mode(mode) + + # Create special access/egress mode for MAZ connectors + maz_access_mode = network.create_mode( + "AUX_AUTO", self.config.highway.maz_to_maz.mode_code + ) + maz_access_mode.description = "MAZ access" + # create modes from class spec + # (duplicate mode codes allowed provided the excluded_links is the same) + mode_excluded_links = {} + for assign_class in self.config.highway.classes: + if assign_class.mode_code in mode_excluded_links: + if ( + 
assign_class.excluded_links + != mode_excluded_links[assign_class.mode_code] + ): + ex_links1 = mode_excluded_links[assign_class.mode_code] + ex_links2 = assign_class.excluded_links + raise Exception( + f"config error: highway.classes, duplicated mode codes " + f"('{assign_class.mode_code}') with different excluded " + f"links: {ex_links1} and {ex_links2}" + ) + continue + mode = network.create_mode("AUX_AUTO", assign_class.mode_code) + mode.description = assign_class.name + mode_excluded_links[mode.id] = assign_class.excluded_links + + dst_veh_groups = self.config.highway.tolls.dst_vehicle_group_names + for link in network.links(): + modes = set(m.id for m in link.modes) + if link.i_node["@maz_id"] + link.j_node["@maz_id"] > 0: + modes.add(maz_access_mode.id) + link.modes = modes + continue + if not link["@drive_link"]: + continue + exclude_links_map = { + "is_sr": link["@useclass"] in [2, 3], + "is_sr2": link["@useclass"] == 2, + "is_sr3": link["@useclass"] == 3, + "is_auto_only": link["@useclass"] in [2, 3, 4], + } + for dst_veh in dst_veh_groups: + exclude_links_map[f"is_toll_{dst_veh}"] = ( + link[f"@valuetoll_{dst_veh}"] > 0 + ) + self._apply_exclusions( + self.config.highway.maz_to_maz.excluded_links, + maz_access_mode.id, + modes, + exclude_links_map, + ) + for assign_class in self.config.highway.classes: + self._apply_exclusions( + assign_class.excluded_links, + assign_class.mode_code, + modes, + exclude_links_map, + ) + link.modes = modes + + @staticmethod + def _apply_exclusions( + excluded_links_criteria: List[str], + mode_code: str, + modes_set: Set[str], + link_values: Dict[str, bool], + ): + """Apply the exclusion criteria to set the link modes.""" + for criteria in excluded_links_criteria: + if link_values[criteria]: + return + modes_set.add(mode_code) + + def _calc_link_skim_lengths(self, network: EmmeNetwork): + """Calculate the length attributes used in the highway skims.""" + tollbooth_start_index = self.config.highway.tolls.tollbooth_start_index + for link in network.links(): + # distance in hov lanes / facilities + if 2 <= link["@useclass"] <= 3: + link["@hov_length"] = link.length + else: + link["@hov_length"] = 0 + # distance on non-bridge toll facilities + if link["@tollbooth"] > tollbooth_start_index: + link["@toll_length"] = link.length + else: + link["@toll_length"] = 0 + + def _calc_link_class_costs(self, network: EmmeNetwork): + """Calculate the per-class link cost from the tolls and operating costs.""" + for assign_class in self.config.highway.classes: + cost_attr = f"@cost_{assign_class.name.lower()}" + op_cost = assign_class["operating_cost_per_mile"] + toll_factor = assign_class.get("toll_factor") + if toll_factor is None: + toll_factor = 1.0 + for link in network.links(): + toll_value = sum(link[toll_attr] for toll_attr in assign_class["toll"]) + link[cost_attr] = link.length * op_cost + toll_value * toll_factor diff --git a/tm2py/components/network/transit/__init__.py b/tm2py/components/network/transit/__init__.py new file mode 100644 index 00000000..5b4136b3 --- /dev/null +++ b/tm2py/components/network/transit/__init__.py @@ -0,0 +1,3 @@ +"""Transit assignment and skim module""" +from .transit_assign import TransitAssignment +from .transit_skim import TransitSkim diff --git a/tm2py/components/network/transit/transit_assign.py b/tm2py/components/network/transit/transit_assign.py new file mode 100644 index 00000000..5039df8f --- /dev/null +++ b/tm2py/components/network/transit/transit_assign.py @@ -0,0 +1,9 @@ +"""Transit assignment module""" + +from 
...component import Component + +# from ....controller import RunController + + +class TransitAssignment(Component): + """Run transit assignment.""" diff --git a/tm2py/components/network/transit/transit_skim.py b/tm2py/components/network/transit/transit_skim.py new file mode 100644 index 00000000..051044be --- /dev/null +++ b/tm2py/components/network/transit/transit_skim.py @@ -0,0 +1,9 @@ +"""Transit skims module""" + +from ...component import Component + +# from ....controller import RunController + + +class TransitSkim(Component): + """Run transit skims""" diff --git a/tm2py/config.py b/tm2py/config.py new file mode 100644 index 00000000..ab0d1d14 --- /dev/null +++ b/tm2py/config.py @@ -0,0 +1,696 @@ +"""Config implementation and schema. +""" +# pylint: disable=too-many-instance-attributes + +from abc import ABC +from typing import List, Tuple, Union, Optional +from typing_extensions import Literal + +from pydantic import Field, validator +from pydantic.dataclasses import dataclass +import toml + + +class ConfigItem(ABC): + """Base class to add partial dict-like interface to tm2py model configuration. + + Allow use of .items() ["X"] and .get("X") .to_dict() from configuration. + + Not to be constructed directly. To be used a mixin for dataclasses + representing config schema. + Do not use "get" "to_dict", or "items" for key names. + """ + + def __getitem__(self, key): + return getattr(self, key) + + def items(self): + """D.items() -> a set-like object providing a view on D's items""" + return self.__dict__.items() + + def get(self, key, default=None): + """Return the value for key if key is in the dictionary, else default.""" + return self.__dict__.get(key, default) + + +@dataclass(frozen=True) +class ScenarioConfig(ConfigItem): + """Scenario related parameters + + Properties: + verify: optional, default False if specified as True components will run + additional post-process verify step to validate results / outputs + (not implemented yet) + maz_landuse_file: relative path to maz_landuse_file used by multiple + components + year: model year, must be at least 2005 + """ + + maz_landuse_file: str + year: int = Field(ge=2005) + verify: Optional[bool] = Field(default=False) + + +ComponentNames = Literal[ + "create_tod_scenarios", + "active_modes", + "air_passenger", + "prepare_network_highway", + "highway_maz_assign", + "highway", + "highway_maz_skim", + "transit", + "household", + "visitor", + "internal_external", + "truck", +] +EmptyString = Literal[""] + + +@dataclass(frozen=True) +class RunConfig(ConfigItem): + """Model run parameters + + Properties: + start_iteration: start iteration number, 0 to include initial_components + end_iteration: final iteration number + start_component: name of component to start with, will skip components + list prior to this component + initial_components: list of components to run as initial (0) iteration + global_iteration_components: list of component to run at every iteration, in order + final_components: list of components to run after final iteration, in order + """ + + initial_components: Tuple[ComponentNames, ...] + global_iteration_components: Tuple[ComponentNames, ...] + final_components: Tuple[ComponentNames, ...] 
+ start_iteration: int = Field(ge=0) + end_iteration: int = Field(gt=0) + start_component: Optional[Union[ComponentNames, EmptyString]] = Field(default="") + + @classmethod + @validator("end_iteration") + def end_iteration_gt_start(cls, value, values): + """Validate end_iteration greater than start_iteration""" + if "start_iteration" in values: + assert ( + value > values["start_iteration"] + ), "must be greater than start_iteration" + return value + + +@dataclass(frozen=True) +class TimePeriodConfig(ConfigItem): + """Time time period entry""" + + name: str + length_hours: float = Field(gt=0) + highway_capacity_factor: float = Field(gt=0) + emme_scenario_id: int = Field(ge=1) + + +@dataclass(frozen=True) +class HouseholdConfig(ConfigItem): + """Household (residents) model parameters""" + + highway_demand_file: str + transit_demand_file: str + + +@dataclass(frozen=True) +class AirPassengerDemandAggregationConfig(ConfigItem): + """Air passenger demand aggregation input parameters""" + + result_class_name: str + src_group_name: str + access_modes: Tuple[str, ...] + + +@dataclass(frozen=True) +class AirPassengerConfig(ConfigItem): + """Air passenger model parameters""" + + highway_demand_file: str + input_demand_folder: str + reference_start_year: str + reference_end_year: str + demand_aggregation: Tuple[AirPassengerDemandAggregationConfig, ...] + + +@dataclass(frozen=True) +class InternalExternalConfig(ConfigItem): + """Internal <-> External model parameters""" + + highway_demand_file: str + input_demand_file: str + reference_year: int + toll_choice_time_coefficient: float + value_of_time: float + shared_ride_2_toll_factor: float + shared_ride_3_toll_factor: float + operating_cost_per_mile: float + + +@dataclass(frozen=True) +class TruckConfig(ConfigItem): + """Truck model parameters""" + + highway_demand_file: str + k_factors_file: str + friction_factors_file: str + value_of_time: float + operating_cost_per_mile: float + toll_choice_time_coefficient: float + max_balance_iterations: int + max_balance_relative_error: float + + +@dataclass(frozen=True) +class ActiveModeShortestPathSkimConfig(ConfigItem): + """Active mode skim entry""" + + mode: str + roots: str + leaves: str + output: str + max_dist_miles: float = None + + +@dataclass(frozen=True) +class ActiveModesConfig(ConfigItem): + """Active Mode skim parameters""" + + emme_scenario_id: int + shortest_path_skims: Tuple[ActiveModeShortestPathSkimConfig, ...] + + +@dataclass(frozen=True) +class HighwayCapClassConfig(ConfigItem): + """Highway link capacity and speed ('capclass') index entry + + Properties: + capclass: cross index for link @capclass lookup + capacity: value for link capacity, PCE / hour + free_flow_speed: value for link free flow speed, miles / hour + critical_speed: value for cirtical speed (Ja) used in Akcelik + type functions + """ + + capclass: int = Field(ge=0) + capacity: float = Field(ge=0) + free_flow_speed: float = Field(ge=0) + critical_speed: float = Field(ge=0) + + +@dataclass(frozen=True) +class HighwayClassDemandConfig(ConfigItem): + """Highway class input source for demand. + + Used to specify where to find related demand file for this + highway class. 
Multiple + + Properties: + source: reference name of the component section for the + source "highway_demand_file" location, one of: + "household", "air_passenger", "internal_external", "truck" + name: name of matrix in the OMX file, can include "{period}" + placeholder + factor: optional, multiplicative factor to generate PCEs from + trucks or convert person-trips to vehicle-trips for HOVs + """ + + name: str = Field() + source: str = Literal["household", "air_passenger", "internal_external", "truck"] + factor: float = Field(default=1.0, gt=0) + + +@dataclass(frozen=True) +class HighwayClassConfig(ConfigItem): + """Highway assignment class definition. + + Note that excluded_links, skims and toll attribute names include + vehicle groups ("{vehicle}") which reference the list of + highway.toll.dst_vehicle_group_names (see HighwayTollsConfig). + The default example model config uses: + "da", "sr2", "sr3", "vsm", sml", "med", "lrg" + + Example single class config: + name = "da" + description= "drive alone" + mode_code= "d" + [[highway.classes.demand]] + source = "household" + name = "SOV_GP_{period}" + [[highway.classes.demand]] + source = "air_passenger" + name = "da" + [[highway.classes.demand]] + source = "internal_external" + name = "da" + excluded_links = ["is_toll_da", "is_sr2"], + value_of_time = 18.93, # $ / hr + operating_cost_per_mile = 17.23, # cents / mile + toll = ["@bridgetoll_da"] + skims = ["time", "dist", "freeflowtime", "bridgetoll_da"], + + Properties: + name: short (up to 10 character) unique reference name for the class. + used in attribute and matrix names + description: longer text used in attribute and matrix descriptions + mode_code: single character mode, used to generate link.modes to + identify subnetwork, generated from "excluded_links" keywords. + Should be unique in list of classes, unless multiple classes + have identical excluded_links specification. Cannot be the + same as used for highway.maz_to_maz.mode_code. 
+ value_of_time: value of time for this class in $ / hr + operating_cost_per_mile: vehicle operating cost in cents / mile + demand: list of OMX file and matrix keyname references, + see HighwayClassDemandConfig + excluded_links: list of keywords to identify links to exclude from + this class' available subnetwork (generate link.modes) + Options are: + - "is_sr": is reserved for shared ride (@useclass in 2,3) + - "is_sr2": is reserved for shared ride 2+ (@useclass == 2) + - "is_sr3": is reserved for shared ride 3+ (@useclass == 3) + - "is_auto_only": is reserved for autos (non-truck) (@useclass != 1) + - "is_toll_{vehicle}": has a value (non-bridge) toll for the {vehicle} toll group + toll: list of additional toll cost link attribute (values stored in cents), + summed, one of "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}" + toll_factor: optional, factor to apply to toll values in cost calculation + pce: optional, passenger car equivalent to convert assigned demand in + PCE units to vehicles for total assigned vehicle calculations + skims: list of skim matrices to generate + Options are: + "time": pure travel time in minutes + "dist": distance in miles + "hovdist": distance on HOV facilities (is_sr2 or is_sr3) + "tolldist": distance on toll facilities + (@tollbooth > highway.tolls.tollbooth_start_index) + "freeflowtime": free flow travel time in minutes + "bridgetoll_{vehicle}": bridge tolls, {vehicle} refers to toll group + "valuetoll_{vehicle}": other, non-bridge tolls, {vehicle} refers to toll group + """ + + name: str = Field(min_length=1, max_length=10) + description: str = Field(default="") + mode_code: str = Field(min_length=1, max_length=1) + value_of_time: float = Field(gt=0) + operating_cost_per_mile: float = Field(ge=0) + pce: Optional[float] = Field(default=1.0, gt=0) + # Note that excluded_links, skims, and tolls validated under HighwayConfig to include + # highway.toll.dst_vehicle_group_names names + excluded_links: Tuple[str, ...] = Field() + skims: Tuple[str, ...] = Field() + toll: Tuple[str, ...] = Field() + toll_factor: Optional[float] = Field(default=None, gt=0) + demand: Tuple[HighwayClassDemandConfig, ...] = Field() + + +@dataclass(frozen=True) +class HighwayTollsConfig(ConfigItem): + """Highway assignment and skim input tolls and related parameters. + + Properties: + file_path: source relative file path for the highway tolls index CSV + tollbooth_start_index: tollbooth separates links with "bridge" tolls + (index < this value) vs. "value" tolls. These toll attributes + can then be referenced separately in the highway.classes[].tolls + list + src_vehicle_group_names: name used for the vehicle toll CSV column IDs, + of the form "toll{period}_{vehicle}" + dst_vehicle_group_names: list of names used in destination network + for the corresponding vehicle group. Length of list must be the same + as src_vehicle_group_names. Used for toll related attributes and + resulting skim matrices. Cross-referenced in list of highway.classes[]: + excluded_links: "is_toll_{vehicle}" + tolls: "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}" + skims: "bridgetoll_{vehicle}", "valuetoll_{vehicle}" + """ + + file_path: str = Field() + tollbooth_start_index: int = Field(gt=1) + src_vehicle_group_names: Tuple[str, ...] = Field() + dst_vehicle_group_names: Tuple[str, ...] 
= Field() + + @classmethod + @validator("dst_vehicle_group_names", always=True) + def dst_vehicle_group_names_length(cls, value, values): + """Validate dst_vehicle_group_names has same length as src_vehicle_group_names""" + if "src_vehicle_group_names" in values: + assert len(value) == len( + values["src_vehicle_group_names"] + ), "must be same length as src_vehicle_group_names" + return value + + +COUNTY_NAMES = Literal[ + "San Francisco", + "San Mateo", + "Santa Clara", + "Alameda", + "Contra Costa", + "Solano", + "Napa", + "Sonoma", + "Marin", +] + + +@dataclass(frozen=True) +class DemandCountyGroupConfig(ConfigItem): + """Grouping of counties for assignment and demand files + + Properties: + number: id number for this group, must be unique + counties: list of one or more county names + """ + + number: int = Field() + counties: Tuple[COUNTY_NAMES, ...] = Field() + + +@dataclass(frozen=True) +class HighwayMazToMazConfig(ConfigItem): + """Highway MAZ to MAZ shortest path assignment and skim parameters + + Properties: + mode_code: single character mode, used to generate link.modes to + identify subnetwork, generated from "excluded_links" keywords, + plus including MAZ connectors. + value_of_time: value of time for this class in $ / hr + operating_cost_per_mile: vehicle operating cost in cents / mile + max_skim_cost: max shortest path distance to search for MAZ-to-MAZ + skims, in generized costs units (includes operating cost + converted to minutes) + excluded_links: list of keywords to identify links to exclude from + MAZ-to-MAZ paths, see HighwayClassConfig.excluded_links + demand_file: relative path to find the input demand files + can have use a placeholder for {period} and {number}, where the + {period} is the time_period.name (see TimePeriodConfig) + and {number} is the demand_count_groups[].number + (see DemandCountyGroupConfig) + e.g.: auto_{period}_MAZ_AUTO_{number}_{period}.omx + demand_county_groups: List of demand county names and + skim_period: period name to use for the shotest path skims, must + match one of the names listed in the time_periods + output_skim_file: relative path to resulting MAZ-to-MAZ skims + """ + + mode_code: str = Field(min_length=1, max_length=1) + value_of_time: float = Field(gt=0) + operating_cost_per_mile: float = Field(ge=0) + max_skim_cost: float = Field(gt=0) + excluded_links: Tuple[str, ...] = Field() + demand_file: str = Field() + demand_county_groups: Tuple[DemandCountyGroupConfig, ...] 
= Field() + skim_period: str = Field() + output_skim_file: str = Field() + + @classmethod + @validator("demand_county_groups") + def unique_group_numbers(cls, value): + """Validate list of demand_county_groups has unique .number values""" + group_ids = [group.number for group in value] + assert len(group_ids) == len(set(group_ids)), "-> number value must be unique" + return value + + +@dataclass(frozen=True) +class HighwayConfig(ConfigItem): + """Highway assignment and skims parameters + + Properties: + generic_highway_mode_code: single character unique mode ID for entire + highway network (no excluded_links) + relative_gap: target relative gap stopping criteria + max_iterations: maximum iterations stopping criteria + area_type_buffer_dist_miles: used to in calculation to categorize link @areatype + The area type is determined based on the average density of nearby + (within this buffer distance) MAZs, using (pop+jobs*2.5)/acres + output_skim_path: relative path template for output skims in OMX format + tolls: input toll specification, see HighwayTollsConfig + maz_to_maz: maz-to-maz shortest path assignment and skim specification, + see HighwayMazToMazConfig + classes: highway assignment multi-class setup and skim specification, + see HighwayClassConfig + capclass_lookup: index cross-reference table from the link @capclass value + to the free-flow speed, capacity, and critical speed values + """ + + generic_highway_mode_code: str = Field(min_length=1, max_length=1) + relative_gap: float = Field(ge=0) + max_iterations: int = Field(ge=0) + area_type_buffer_dist_miles: float = Field(gt=0) + output_skim_path: str = Field() + tolls: HighwayTollsConfig = Field() + maz_to_maz: HighwayMazToMazConfig = Field() + classes: Tuple[HighwayClassConfig, ...] = Field() + capclass_lookup: Tuple[HighwayCapClassConfig, ...] 
= Field() + + @classmethod + @validator("capclass_lookup") + def unique_capclass_numbers(cls, value): + """Validate list of capclass_lookup has unique .capclass values""" + capclass_ids = [i.capclass for i in value] + error_msg = "-> capclass value must be unique in list" + assert len(capclass_ids) == len(set(capclass_ids)), error_msg + return value + + @classmethod + @validator("classes", pre=True) + def unique_class_names(cls, value): + """Validate list of classes has unique .name values""" + class_names = [highway_class["name"] for highway_class in value] + error_msg = "-> name value must be unique in list" + assert len(class_names) == len(set(class_names)), error_msg + return value + + @classmethod + @validator("classes") + def validate_class_mode_excluded_links(cls, value, values): + """Validate list of classes has unique .mode_code or .excluded_links match""" + # validate if any mode IDs are used twice, that they have the same excluded links sets + mode_excluded_links = {values["generic_highway_mode_code"]: set([])} + for i, highway_class in enumerate(value): + # maz_to_maz.mode_code must be unique + if "maz_to_maz" in values: + assert ( + highway_class["mode_code"] != values["maz_to_maz"]["mode_code"] + ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code" + # make sure that if any mode IDs are used twice, they have the same excluded links sets + if highway_class.mode_code in mode_excluded_links: + ex_links1 = highway_class["excluded_links"] + ex_links2 = mode_excluded_links[highway_class["mode_code"]] + error_msg = ( + f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') " + f"with different excluded links: {ex_links1} and {ex_links2}" + ) + assert ex_links1 == ex_links2, error_msg + mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links + return value + + @classmethod + @validator("classes") + def validate_class_keyword_lists(cls, value, values): + """Validate classes .skims, .toll, and .excluded_links values""" + if "tolls" not in values: + return value + avail_skims = ["time", "dist", "hovdist", "tolldist", "freeflowtime"] + available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"] + avail_toll_attrs = [] + for name in values["tolls"].dst_vehicle_group_names: + toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"] + avail_skims.extend(toll_types) + avail_toll_attrs.extend(["@" + name for name in toll_types]) + available_link_sets.append(f"is_toll_{name}") + + # validate class skim name list and toll attribute against toll setup + def check_keywords(class_num, key, val, available): + extra_keys = set(val) - set(available) + error_msg = ( + f" -> {class_num} -> {key}: unrecognized {key} name(s): " + f"{','.join(extra_keys)}. 
Available names are: {', '.join(available)}" + ) + assert not extra_keys, error_msg + + for i, highway_class in enumerate(value): + check_keywords(i, "skim", highway_class["skims"], avail_skims) + check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs) + check_keywords( + i, + "excluded_links", + highway_class["excluded_links"], + available_link_sets, + ) + return value + + +@dataclass(frozen=True) +class TransitModeConfig(ConfigItem): + """Transit mode definition (see also mode in the Emme API)""" + + type: Literal["WALK", "ACCESS", "EGRESS", "LOCAL", "PREMIUM"] + assign_type: Literal["TRANSIT", "AUX_TRANSIT"] + mode_id: str = Field(min_length=1, max_length=1) + name: str = Field(max_length=10) + in_vehicle_perception_factor: Optional[float] = Field(default=None, ge=0) + speed_miles_per_hour: Optional[float] = Field(default=None, gt=0) + + @classmethod + @validator("in_vehicle_perception_factor", always=True) + def in_vehicle_perception_factor_valid(cls, value, values): + """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT""" + if "assign_type" in values and values["assign_type"] == "TRANSIT": + assert value is not None, "must be specified when assign_type==TRANSIT" + return value + + @classmethod + @validator("speed_miles_per_hour", always=True) + def speed_miles_per_hour_valid(cls, value, values): + """Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT""" + if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT": + assert value is not None, "must be specified when assign_type==AUX_TRANSIT" + return value + + +@dataclass(frozen=True) +class TransitVehicleConfig(ConfigItem): + """Transit vehicle definition (see also transit vehicle in the Emme API)""" + + vehicle_id: int + mode: str + name: str + auto_equivalent: Optional[float] = Field(default=0, ge=0) + seated_capacity: Optional[int] = Field(default=None, ge=0) + total_capacity: Optional[int] = Field(default=None, ge=0) + + +@dataclass(frozen=True) +class TransitConfig(ConfigItem): + """Transit assignment parameters""" + + modes: Tuple[TransitModeConfig, ...] + vehicles: Tuple[TransitVehicleConfig, ...] + + apply_msa_demand: bool + value_of_time: float + effective_headway_source: str + initial_wait_perception_factor: float + transfer_wait_perception_factor: float + walk_perception_factor: float + initial_boarding_penalty: float + transfer_boarding_penalty: float + max_transfers: int + output_skim_path: str + fares_path: str + fare_matrix_path: str + fare_max_transfer_distance_miles: float + use_fares: bool + override_connector_times: bool + input_connector_access_times_path: Optional[str] = Field(default=None) + input_connector_egress_times_path: Optional[str] = Field(default=None) + output_stop_usage_path: Optional[str] = Field(default=None) + + +@dataclass(frozen=True) +class EmmeConfig(ConfigItem): + """Emme-specific parameters. + + Properties: + all_day_scenario_id: scenario ID to use for all day + (initial imported) scenario with all time period data + project_path: relative path to Emme desktop project (.emp) + highway_database_path: relative path to highway Emmebank + active_database_paths: list of relative paths to active mode Emmebanks + transit_database_path: relative path to transit Emmebank + num_processors: the number of processors to use in Emme procedures, + either as an integer, or value MAX, MAX-N. Typically recommend + using MAX-1 (on desktop systems) or MAX-2 (on servers with many + logical processors) to leave capacity for background / other tasks. 
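The `num_processors` field accepts `MAX`, `MAX-N`, or a plain integer, enforced by the regex on the field below. One possible way to interpret such a value is sketched here; this is illustrative and may differ in detail from tm2py's own `parse_num_processors` helper.

```python
# Illustrative interpretation of a num_processors setting ("MAX", "MAX-N", or an int).
import multiprocessing
import re


def interpret_num_processors(value: str) -> int:
    cleaned = value.upper().replace(" ", "")
    available = multiprocessing.cpu_count()
    if cleaned == "MAX":
        return available
    match = re.match(r"^MAX-(\d+)$", cleaned)
    if match:
        return max(1, available - int(match.group(1)))
    return int(cleaned)


print(interpret_num_processors("MAX - 1"))  # cpu_count() - 1, e.g. 7 on an 8-core machine
```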
+ """ + + all_day_scenario_id: int + project_path: str + highway_database_path: str + active_database_paths: Tuple[str, ...] + transit_database_path: str + num_processors: str = Field(regex=r"(?i)^MAX$|^MAX[\s]*-[\s]*[\d]+$|^[\d]+$") + + +@dataclass(frozen=True) +class Configuration(ConfigItem): + """Configuration: root of the model configuration""" + + scenario: ScenarioConfig + run: RunConfig + time_periods: Tuple[TimePeriodConfig, ...] + household: HouseholdConfig + air_passenger: AirPassengerConfig + internal_external: InternalExternalConfig + truck: TruckConfig + active_modes: ActiveModesConfig + highway: HighwayConfig + transit: TransitConfig + emme: EmmeConfig + + @classmethod + def load_toml(cls, path: Union[str, List[str]]): + """Load configuration from .toml files(s) + + Normally the config is split into a scenario_config.toml file and a + model_config.toml file. + + Args: + path: a valid system path to a TOML format config file or list of paths + + Returns: + A Configuration object + """ + if isinstance(path, str): + path = [path] + data = _load_toml(path[0]) + for path_item in path[1:]: + _merge_dicts(data, _load_toml(path_item)) + return cls(**data) + + @classmethod + @validator("highway") + def maz_skim_period_exists(cls, value, values): + """Validate highway.maz_to_maz.skim_period refers to a valid period""" + if "time_periods" in values: + time_period_names = set(time.name for time in values["time_periods"]) + assert ( + value.maz_to_maz.skim_period in time_period_names + ), "maz_to_maz -> skim_period -> name not found in time_periods list" + return value + + +def _load_toml(path: str) -> dict: + """Load config from toml file at path""" + with open(path, "r", encoding="utf-8") as toml_file: + data = toml.load(toml_file) + return data + + +def _merge_dicts(right, left, path=None): + """Merges the contents of nested dict left into nested dict right. + + Raises errors in case of namespace conflicts. + Args: + right: dict, modified in place + left: dict to be merged into right + path: default None, sequence of keys to be reported in case of + error in merging nested dictionaries + """ + if path is None: + path = [] + for key in left: + if key in right: + if isinstance(right[key], dict) and isinstance(left[key], dict): + _merge_dicts(right[key], left[key], path + [str(key)]) + else: + path = ".".join(path + [str(key)]) + raise Exception(f"duplicate keys in source .toml files: {path}") + else: + right[key] = left[key] diff --git a/tm2py/controller.py b/tm2py/controller.py new file mode 100644 index 00000000..defd6ba3 --- /dev/null +++ b/tm2py/controller.py @@ -0,0 +1,166 @@ +"""RunController - model operation controller. + +Main interface to start a TM2PY model run. 
Provide one or more configuration +files in .toml format (by convention a scenario.toml and a model.toml) + + Typical usage example: + from tm2py.controller import RunController + controller = RunController( + [r"example_union\\scenario.toml", r"example_union\\model.toml"]) + controller.run() + + Or from the command-line: + python \\tm2py\\tm2py\\controller.py -s scenario.toml -m model.toml + +""" + +import itertools +import os +from typing import Union, List + +from tm2py.config import Configuration +from tm2py.emme.manager import EmmeManager +from tm2py.logger import Logger +from tm2py.components.component import Component +from tm2py.components.network.highway.highway_assign import HighwayAssignment +from tm2py.components.network.highway.highway_network import PrepareNetwork +from tm2py.components.network.highway.highway_maz import AssignMAZSPDemand, SkimMAZCosts + +# mapping from names referenced in config.run to imported classes +# NOTE: component names also listed as literal in tm2py.config for validation +component_cls_map = { + "prepare_network_highway": PrepareNetwork, + "highway": HighwayAssignment, + "highway_maz_assign": AssignMAZSPDemand, + "highway_maz_skim": SkimMAZCosts, +} + +# pylint: disable=too-many-instance-attributes + + +class RunController: + """Main operational interface for model runs. + + Provide one or more config files in TOML (*.toml) format, and a run directory. + If the run directory is not provided the root directory of the first config_file is used. + + Properties: + config: root Configuration object + logger: logger object + top_sheet: placeholder for top sheet functionality (not implemented yet) + trace: placeholder for trace functionality (not implemented yet) + run_dir: root run directory for the model run + iteration: current running (or last started) iteration + component: current running (or last started) Component object + emme_manager: EmmeManager object for centralized Emme-related (highway and + transit assignments and skims) utilities.
+ complete_components: list of components which have completed, tuple of + (iteration, name, Component object) + """ + + def __init__(self, config_file: Union[List[str], str] = None, run_dir: str = None): + if not isinstance(config_file, list): + config_file = [config_file] + if run_dir is None: + run_dir = os.path.abspath(os.path.dirname(config_file[0])) + self._run_dir = run_dir + + self.config = Configuration.load_toml(config_file) + self.logger = Logger(self) + self.top_sheet = None + self.trace = None + self.completed_components = [] + + # mapping from defined names referenced in config to Component objects + self._component_map = {k: v(self) for k, v in component_cls_map.items()} + self._emme_manager = None + self._iteration = None + self._component = None + self._queued_components = [] + self._queue_components() + + @property + def run_dir(self) -> str: + """The root run directory of the model run""" + return self._run_dir + + @property + def iteration(self) -> int: + """Current iteration of model""" + return self._iteration + + @property + def component(self) -> Component: + """Current component of model""" + return self._component + + @property + def emme_manager(self) -> EmmeManager: + """Cached Emme Manager object""" + if self._emme_manager is None: + self._init_emme_manager() + return self._emme_manager + + def _init_emme_manager(self): + """Initialize Emme manager, start Emme desktop App, and initialize Modeller""" + self._emme_manager = EmmeManager() + project = self._emme_manager.project( + os.path.join(self.run_dir, self.config.emme.project_path) + ) + # Initialize Modeller to use Emme assignment tools and other APIs + self._emme_manager.modeller(project) + + def run(self): + """Main interface to run model""" + self._iteration = None + self.validate_inputs() + for iteration, name, component in self._queued_components: + if self._iteration != iteration: + self.logger.log_time(f"Start iteration {iteration}") + self._iteration = iteration + self._component = component + component.run() + self.completed_components.append((iteration, name, component)) + + def _queue_components(self): + """Add components per iteration to queue according to input Config""" + self._queued_components = [] + if self.config.run.start_iteration == 0: + self._queued_components += [ + (0, c_name, self._component_map[c_name]) + for c_name in self.config.run.initial_components + ] + iteration_nums = range( + max(1, self.config.run.start_iteration), self.config.run.end_iteration + 1 + ) + # queue global iteration components as (iteration, name, component) tuples + # so they unpack consistently in run() and validate_inputs() + self._queued_components += [ + (iteration, c_name, self._component_map[c_name]) + for iteration, c_name in itertools.product( + iteration_nums, self.config.run.global_iteration_components + ) + ] + self._queued_components += [ + (self.config.run.end_iteration + 1, c_name, self._component_map[c_name]) + for c_name in self.config.run.final_components + ] + + if self.config.run.start_component: + start_index = [ + idx + for idx, c in enumerate(self._queued_components) + if self.config.run.start_component == c[1] + ][0] + self._queued_components = self._queued_components[start_index:] + + def validate_inputs(self): + """Validate input state prior to run""" + already_validated_components = set() + for _, name, component in self._queued_components: + if name not in already_validated_components: + component.validate_inputs() + already_validated_components.add(name) diff --git a/tm2py/emme/__init__.py b/tm2py/emme/__init__.py new file mode 100644 index
00000000..e69de29b diff --git a/tm2py/emme/manager.py b/tm2py/emme/manager.py new file mode 100644 index 00000000..13077a56 --- /dev/null +++ b/tm2py/emme/manager.py @@ -0,0 +1,317 @@ +"""Module for Emme Manager for centralized management of Emme projects + +Centralized location for Emme API imports, which are automatically replaced +by unittest.Mock / MagicMock to support testing where Emme is not installed. + +Contains EmmeManager class for access to common Emme-related procedures +(common-code / utility-type methods) and caching access to Emme project, +and Modeller. +""" + +from contextlib import contextmanager as _context +import os +from socket import error as _socket_error +from typing import Any, Dict, List, Union + +# PyLint cannot build AST from compiled Emme libraries +# so disabling relevant import module checks +# pylint: disable=E0611, E0401, E1101 +from inro.emme.database.emmebank import Emmebank +from inro.emme.network import Network as EmmeNetwork +from inro.emme.database.scenario import Scenario as EmmeScenario +from inro.emme.database.matrix import Matrix as EmmeMatrix # pylint: disable=W0611 +from inro.emme.network.node import Node as EmmeNode # pylint: disable=W0611 +import inro.emme.desktop.app as _app +from inro.modeller import Modeller as EmmeModeller, logbook_write, logbook_trace + +EmmeDesktopApp = _app.App + +# "Emme Manager requires Emme to be installed unless running in a test environment." +# "Please install Emme and try again." + + +# Cache running Emme projects from this process (simple singleton implementation) +_EMME_PROJECT_REF = {} + + +class EmmeManager: + """Centralized cache for Emme project and related calls for traffic and transit assignments. + + Wraps Emme Desktop API (see Emme API Reference for additional details on the Emme objects). + """ + + def __init__(self): + # mapping of Emme project path to Emme Desktop API object for reference + # (projects are opened only once) + self._project_cache = _EMME_PROJECT_REF + + def close_all(self): + """ + Close all open cached Emme project(s). + + Should be called at the end of the model process / Emme assignments. + """ + while self._project_cache: + _, app = self._project_cache.popitem() + app.close() + + def create_project(self, project_dir: str, name: str) -> EmmeDesktopApp: + """Create, open and return Emme project + + Args: + project_dir: path to Emme root directory for new Emme project + name: name for the Emme project + + Returns: + Emme Desktop App object, see Emme API Reference, Desktop section for details. + """ + emp_path = _app.create_project(project_dir, name) + return self.project(emp_path) + + def project(self, project_path: str) -> EmmeDesktopApp: + """Return already open Emme project, or open new Desktop session if not found. + + Args: + project_path: valid path to Emme project *.emp file + + Returns: + Emme Desktop App object, see Emme API Reference, Desktop section for details. 
+ """ + project_path = os.path.normcase(os.path.realpath(project_path)) + emme_project = self._project_cache.get(project_path) + if emme_project: + try: # Check if the Emme window was closed + emme_project.current_window() + except _socket_error: + emme_project = None + # if window is not opened in this process, start a new one + if emme_project is None: + if not os.path.isfile(project_path): + raise Exception(f"Emme project path does not exist {project_path}") + emme_project = _app.start_dedicated( + visible=True, user_initials="inro", project=project_path + ) + self._project_cache[project_path] = emme_project + return emme_project + + @staticmethod + def emmebank(path: str) -> Emmebank: + """Open and return the Emmebank at path. + + Args: + path: valid system path pointing to an Emmebank file + Returns: + Emmebank object, see Emme API Reference, Database section for details. + """ + if not path.endswith("emmebank"): + path = os.path.join(path, "emmebank") + return Emmebank(path) + + def change_emmebank_dimensions( + self, emmebank: Emmebank, dimensions: Dict[str, int] + ): + """Change the Emmebank dimensions as specified. See the Emme API help for details. + + Args: + emmebank: the Emmebank object to change the dimensions + dimensions: dictionary of the specified dimensions to set. + """ + dims = emmebank.dimensions + new_dims = dims.copy() + new_dims.update(dimensions) + if dims != new_dims: + change_dimensions = self.tool( + "inro.emme.data.database.change_database_dimensions" + ) + change_dimensions(new_dims, emmebank, keep_backup=False) + + def modeller(self, emme_project: EmmeDesktopApp = None) -> EmmeModeller: + """Initialize and return Modeller object. + + If Modeller has not already been initialized it will do so on + specified Emme project, or the first Emme project opened if not provided. + If already initialized Modeller will reference whichever project was used + first. + + Args: + emme_project: open 'Emme Desktop' application (inro.emme.desktop.app) + + Returns: + Emme Modeller object, see Emme API Reference, Modeller section for details. + """ + # pylint: disable=E0611, E0401, E1101 + try: + return EmmeModeller() + except AssertionError as error: + if emme_project is None: + if self._project_cache: + emme_project = next(iter(self._project_cache.values())) + else: + raise Exception( + "modeller not yet initialized and no cached Emme project," + " emme_project arg must be provided" + ) from error + return EmmeModeller(emme_project) + + def tool(self, namespace: str): + """Return the Modeller tool at namespace. + + Returns: + Corresponding Tool object, see Emme Help for full details. + """ + return self.modeller().tool(namespace) + + @staticmethod + @_context + def temp_attributes_and_restore( + scenario: EmmeScenario, attributes: List[List[str]] + ): + """Create temp extra attribute and network field, and backup values and state and restore. + + Allows the use of temporary attributes which may conflict with existing attributes. + The temp created attributes are deleted at the end, and if there were pre-existing + attributes with the same names the values are restored. + + Note that name conflicts may still arise in the shorthand inheritance systems + for the network hierarchy tree (@node attribute reserves -> @nodei, @nodej, etc, + see Emme help Network calculations for full list) which will raise an error in the + Emme API. 
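As a usage sketch for the EmmeManager methods above (tm2py/emme/manager.py): this is illustrative only, assumes a local Emme installation, and the project and database paths are placeholders rather than files shipped with this PR.

```python
# Minimal sketch, not part of the PR: the .emp and emmebank paths are hypothetical.
from tm2py.emme.manager import EmmeManager

manager = EmmeManager()

# project() opens the Emme Desktop session for a .emp file, or reuses a cached one
project = manager.project(r"example_union\emme_project\mtc_emme.emp")

# modeller() initializes Modeller once per process; tool() then resolves tools by namespace
modeller = manager.modeller(project)
net_calc_tool = manager.tool("inro.emme.network_calculation.network_calculator")

# emmebank() opens a database directly; change_emmebank_dimensions() resizes it if needed
emmebank = manager.emmebank(r"example_union\emme_project\Database\emmebank")
manager.change_emmebank_dimensions(emmebank, {"full_matrices": 9999})

manager.close_all()  # close cached Emme Desktop sessions at the end of the run
```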
+ + Args: + scenario: Emme scenario object + attributes: list of attribute details, where details is a list of 3 items + for extra attributes and 4 for network fields: domain, name, description[, atype] + """ + attrs_to_delete = [] + fields_to_delete = [] + attrs_to_restore = dict( + (d, []) for d in ["NODE", "LINK", "TURN", "TRANSIT_LINE", "TRANSIT_SEGMENT"] + ) + for details in attributes: + domain, name, desc = details[:3] + attr = scenario.extra_attribute(name) + field = scenario.network_field(domain, name) + if attr or field: + attrs_to_restore[domain].append(name) + elif name.startswith("@"): + attr = scenario.create_extra_attribute(domain, name) + attr.description = desc + attrs_to_delete.append(name) + else: + atype = details[3] + field = scenario.create_network_field(domain, name, atype) + field.description = desc + fields_to_delete.append((domain, name)) + backup = [] + for domain, names in attrs_to_restore.items(): + if names: + backup.append( + (domain, names, scenario.get_attribute_values(domain, names)) + ) + try: + yield + finally: + for name in attrs_to_delete: + scenario.delete_extra_attribute(name) + for domain, name in fields_to_delete: + scenario.delete_network_field(domain, name) + for domain, names, values in backup: + scenario.set_attribute_values(domain, names, values) + + @staticmethod + def copy_attr_values( + domain: str, + src: Union[EmmeScenario, EmmeNetwork], + dst: Union[EmmeScenario, EmmeNetwork], + src_names: List[str], + dst_names: List[str] = None, + ): + """Copy attribute values between Emme scenario (on disk) and network (in memory). + + Args: + domain: attribute domain, one of "NODE", "LINK", "TURN", "TRANSIT_LINE", + "TRANSIT_SEGMENT" + src: source Emme scenario or network to load values from + dst: destination Emme scenario or network to save values to + src_names: names of the attributes for loading values + dst_names: optional, names of the attributes to save values as, defaults + to using the src_names if not specified + """ + if dst_names is None: + dst_names = src_names + values = src.get_attribute_values(domain, src_names) + dst.set_attribute_values(domain, dst_names, values) + + def get_network( + self, scenario: EmmeScenario, attributes: Dict[str, List[str]] = None + ) -> EmmeNetwork: + """Read partial Emme network from the scenario for the domains and attributes specified. + + Optimized load of network object from scenario (disk / emmebank) for only the + domains specified, and only reads the attributes specified. The attributes is a + dictionary with keys for the required domains, and values as lists of the + attributes required by domain. + + Wrapper for scenario.get_partial_network followed by scenario.get_attribute_values + and network.set_attribute_values. + + Args: + scenario: Emme scenario object, see Emme API reference + attributes: dictionary of domain names to lists of attribute names + + Returns: + Emme Network object, see Emme API Reference, Network section for details. + """ + if attributes is None: + return scenario.get_network() + network = scenario.get_partial_network( + attributes.keys(), include_attributes=False + ) + for domain, attrs in attributes.items(): + if attrs: + self.copy_attr_values(domain, scenario, network, attrs) + return network + + @staticmethod + def logbook_write(name: str, value: str = None, attributes: Dict[str, Any] = None): + """Write an entry to the Emme Logbook at the current nesting level.
+ + Wrapper for inro.modeller.logbook_write. + + Args: + name: The title of the logbook entry + attributes: Optional. A Python dictionary of key-value pairs to be + displayed in the logbook entry detailed view. + value: Optional. An HTML string value to be displayed in main detail + pane of the logbook entry + """ + # pylint: disable=E0611, E0401, E1101 + attributes = attributes if attributes else {} + logbook_write(name, value=value, attributes=attributes) + + @staticmethod + @_context + def logbook_trace(name: str, value: str = None, attributes: Dict[str, Any] = None): + """Write an entry to the Modeller logbook and create a nest in the Logbook. + + Wrapper for inro.modeller.logbook_trace. Used in the with statement, e.g. + + ``` + with _emme_tools.logbook_trace('My nest'): + _emme_tools.logbook_write('This entry is nested') + ``` + + Args: + name: The title of the logbook entry + attributes: Optional. A Python dictionary of key-value pairs to be + displayed in the logbook entry detailed view. + value: Optional. An HTML string value to be displayed in main detail + pane of the logbook entry. + """ + # pylint: disable=E0611, E0401, E1101 + attributes = attributes if attributes else {} + with logbook_trace(name, value=value, attributes=attributes): + yield diff --git a/tm2py/emme/matrix.py b/tm2py/emme/matrix.py new file mode 100644 index 00000000..9059cce8 --- /dev/null +++ b/tm2py/emme/matrix.py @@ -0,0 +1,264 @@ +"""Module for Emme-related matrix management. + +Contains the MatrixCache class for write through matrix data management of Emme +matrices (in Emmebank) to avoid repeated read-from-disk of skim matrices +during post-assignment processing and export to OMX. + +Contains the OMXManager which is a thin wrapper on the openmatrix (OMX) +library for transfer between Emme (emmebank) <-> OMX files. Integrates with +the MatrixCache to support easy write from Emmebank without re-reading data +from disk. +""" + +from typing import List, Union, Dict + +from numpy import array as NumpyArray, resize +import openmatrix as _omx + +from tm2py.emme.manager import EmmeScenario, EmmeMatrix + + +class MatrixCache: + """Write through cache of Emme matrix data via Numpy arrays + + Args: + scenario: reference scenario for the active Emmebank and matrix zone system + """ + + def __init__(self, scenario: EmmeScenario): + self._scenario = scenario + self._emmebank = scenario.emmebank + # mapping from matrix object to last read/write timestamp for cache invalidation + self._timestamps = {} + # cache of Emme matrix data, key: matrix object, value: numpy array of data + self._data = {} + + def get_data(self, matrix: Union[str, EmmeMatrix]) -> NumpyArray: + """Get Emme matrix data as numpy array. + + Args: + matrix: Emme matrix object or unique name / ID for Emme matrix in Emmebank + + Returns: + The Numpy array of values for this matrix / matrix ID. + """ + if isinstance(matrix, str): + matrix = self._emmebank.matrix(matrix) + timestamp = matrix.timestamp + prev_timestamp = self._timestamps.get(matrix) + if prev_timestamp is None or (timestamp != prev_timestamp): + self._timestamps[matrix] = matrix.timestamp + self._data[matrix] = matrix.get_numpy_data(self._scenario.id) + return self._data[matrix] + + def set_data(self, matrix: Union[str, EmmeMatrix], data: NumpyArray): + """Set numpy array to Emme matrix (write through cache). 
+ + Args: + matrix: Emme matrix object or unique name / ID for Emme matrix in Emmebank + data: Numpy array, must match the scenario zone system + """ + if isinstance(matrix, str): + matrix = self._emmebank.matrix(matrix) + matrix.set_numpy_data(data, self._scenario.id) + self._timestamps[matrix] = matrix.timestamp + self._data[matrix] = data + + def clear(self): + """Clear the cache.""" + self._timestamps = {} + self._data = {} + + +# disable too-many-instance-attributes recommendation +# pylint: disable=R0902 +class OMXManager: + """Wrapper for the OMX interface to write from Emme matrices and numpy arrays. + + Write from Emmebank or Matrix Cache to OMX file, or read from OMX to Numpy. + Also supports with statement. + + Args: + file_path: path of OMX file + mode: "r", "w" or "a" + scenario: Emme scenario object for zone system and reference + Emmebank + omx_key: "ID_NAME", "NAME", "ID", format for generating + OMX key from Emme matrix data + matrix_cache: optional, Matrix Cache to support write data + from cache (instead of always reading from Emmebank) + mask_max_value: optional, max value above which to write + zero instead ("big to zero" behavior) + """ + + def __init__( + self, + file_path: str, + mode: str = "r", + scenario: EmmeScenario = None, + omx_key: str = "NAME", + matrix_cache: MatrixCache = None, + mask_max_value: float = None, + ): # pylint: disable=R0913 + self._file_path = file_path + self._mode = mode + self._scenario = scenario + self._omx_key = omx_key + self._mask_max_value = mask_max_value + self._omx_file = None + self._emme_matrix_cache = matrix_cache + self._read_cache = {} + + def _generate_name(self, matrix: EmmeMatrix) -> str: + if self._omx_key == "ID_NAME": + return f"{matrix.id}_{matrix.name}" + if self._omx_key == "NAME": + return matrix.name + if self._omx_key == "ID": + return matrix.id + raise Exception(f"invalid omx_key: {self._omx_key}") + + def open(self): + """Open the OMX file.""" + self._omx_file = _omx.open_file(self._file_path, self._mode) + + def close(self): + """Close the OMX file.""" + if self._omx_file is not None: + self._omx_file.close() + self._omx_file = None + self._read_cache = {} + + def __enter__(self): + self.open() + if self._mode in ["a", "w"] and self._scenario is not None: + try: + self._omx_file.create_mapping( + "zone_number", self._scenario.zone_numbers + ) + except LookupError: + pass + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def write_matrices(self, matrices: List[Union[EmmeMatrix, str]]): + """Write the list of Emme matrices to OMX file. + + Args: + matrices: list of Emme matrix objects or names / IDs + of matrices in Emmebank, or dictionary of + name: Emme matrix object/ Emme matrix ID + """ + if isinstance(matrices, dict): + for key, matrix in matrices.items(): + self.write_matrix(matrix, key) + else: + for matrix in matrices: + self.write_matrix(matrix) + + def write_matrix(self, matrix: Union[str, EmmeMatrix], name=None): + """Write Emme matrix (as name or ID or Emme matrix object).
+ + Args: + matrix: Emme matrix object or name / ID of matrix in Emmebank + name: optional name to use for OMX key, if not specified the + omx_key format will be used to generate a name from the + Emme matrix data + """ + if self._mode not in ["a", "w"]: + raise Exception(f"{self._file_path}: open in read-only mode") + if isinstance(matrix, str): + matrix = self._scenario.emmebank.matrix(matrix) + if name is None: + name = self._generate_name(matrix) + if self._emme_matrix_cache: + numpy_array = self._emme_matrix_cache.get_data(matrix) + else: + numpy_array = matrix.get_numpy_data(self._scenario.id) + if matrix.type == "DESTINATION": + n_zones = len(numpy_array) + numpy_array = resize(numpy_array, (1, n_zones)) + elif matrix.type == "ORIGIN": + n_zones = len(numpy_array) + numpy_array = resize(numpy_array, (n_zones, 1)) + attrs = {"description": matrix.description} + self.write_array(numpy_array, name, attrs) + + def write_clipped_array( + self, + numpy_array: NumpyArray, + name: str, + a_min: float, + a_max: float = None, + attrs: Dict[str, str] = None, + ): # pylint: disable=R0913 + """Write array with min and max values capped. + + Args: + numpy_array: Numpy array + name: name to use for the OMX key + a_min: minimum value to clip array data + a_max: optional maximum value to clip array data + attrs: additional attribute key value pairs to write to OMX file + """ + if a_max is not None: + numpy_array = numpy_array.clip(a_min, a_max) + else: + numpy_array = numpy_array.clip(a_min) + self.write_array(numpy_array, name, attrs) + + def write_array( + self, numpy_array: NumpyArray, name: str, attrs: Dict[str, str] = None + ): + """Write array with name and optional attrs to OMX file. + + Args: + numpy_array:: Numpy array + name: name to use for the OMX key + attrs: additional attribute key value pairs to write to OMX file + """ + if self._mode not in ["a", "w"]: + raise Exception(f"{self._file_path}: open in read-only mode") + shape = numpy_array.shape + if len(shape) == 2: + chunkshape = (1, shape[0]) + else: + chunkshape = None + if self._mask_max_value: + numpy_array[numpy_array > self._mask_max_value] = 0 + numpy_array = numpy_array.astype(dtype="float64", copy=False) + self._omx_file.create_matrix( + name, obj=numpy_array, chunkshape=chunkshape, attrs=attrs + ) + + def read(self, name: str) -> NumpyArray: + """Read OMX data as numpy array (standard interface). + + Caches matrix data (arrays) already read from disk. + + Args: + name: name of OMX matrix + + Returns: + Numpy array from OMX file + """ + if name in self._read_cache: + return self._read_cache[name] + data = self._omx_file[name].read() + self._read_cache[name] = data + return data + + def read_hdf5(self, path: str) -> NumpyArray: + """Read data directly from PyTables interface. + + Support for hdf5 formats that don't have full OMX compatibility. + + Args: + path: hdf5 reference path to matrix data + + Returns: + Numpy array from OMX file + """ + return self._omx_file.get_node(path).read() diff --git a/tm2py/emme/network.py b/tm2py/emme/network.py new file mode 100644 index 00000000..99f80709 --- /dev/null +++ b/tm2py/emme/network.py @@ -0,0 +1,109 @@ +"""Module for Emme network calculations. 
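As a usage sketch for the MatrixCache and OMXManager classes above (tm2py/emme/matrix.py): illustrative only; the matrix names and output path are invented, and an already-open Emme scenario containing those skims is assumed.

```python
# Minimal sketch, not part of the PR: matrix names and the OMX path are hypothetical.
from tm2py.emme.matrix import MatrixCache, OMXManager

cache = MatrixCache(scenario)  # scenario: an already-open EmmeScenario (assumed)

# write-through cache: repeated get_data() calls avoid re-reading the Emmebank from disk
da_time_am = cache.get_data("DA_TIME__AM")

# export matrices to OMX, zeroing out "big" unreachable values above the mask threshold
with OMXManager(
    r"skims\highway_am.omx",
    mode="w",
    scenario=scenario,
    matrix_cache=cache,
    mask_max_value=1e19,
) as omx_file:
    omx_file.write_matrices(["DA_TIME__AM", "DA_DIST__AM"])

cache.clear()  # drop cached arrays once the export is done
```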
+ +Contains NetworkCalculator class to generate Emme format specifications for +the Network calculator.""" + +from typing import Union, Dict, List + +import tm2py.emme.manager as _manager + +EmmeScenario = _manager.EmmeScenario +EmmeNetworkCalcSpecification = Dict[str, Union[str, Dict[str, str]]] + + +class NetworkCalculator: + """Simple wrapper interface to the Emme Network calculator + + Used to generate the standard network calculator specification (dictionary) + from argument inputs. Useful when NOT (commonly) using selection or + aggregation options, and mostly running link expression calculations + + Args: + scenario: Emme scenario object + """ + + def __init__(self, scenario: EmmeScenario): + self._scenario = scenario + emme_manager = _manager.EmmeManager() + modeller = emme_manager.modeller() + self._network_calc = modeller.tool( + "inro.emme.network_calculation.network_calculator" + ) + self._specs = [] + + def __call__( + self, + result: str, + expression: str, + selections: Union[str, Dict[str, str]] = None, + aggregation: Dict[str, str] = None, + ) -> Dict[str, float]: + """Run a network calculation in the scenario, see the Emme help for more. + + Args: + result: Name of network attribute + expression: Calculation expression + selections: Selection expression nest. Defaults to {"link": "all"} if + not specified, and is used as a link selection expression + if specified as a string. + aggregation: Aggregation operators if aggregating between network domains. + + Returns: + A dictionary report with min, max, average and sum of the calculation + expression. See Emme help 'Network calculator' for more. + """ + spec = self._format_spec(result, expression, selections, aggregation) + return self._network_calc(spec, self._scenario) + + def add_calc( + self, + result: str, + expression: str, + selections: Union[str, Dict[str, str]] = None, + aggregation: Dict[str, str] = None, + ): + """Add calculation to list of network calculations to run. + + Args: + result: Name of network attribute + expression: Calculation expression + selections: Selection expression nest. Defaults to {"link": "all"} if + not specified, and is used as a link selection expression + if specified as a string. + aggregation: Aggregation operators if aggregating between network domains. + """ + self._specs.append( + self._format_spec(result, expression, selections, aggregation) + ) + + def run(self) -> List[Dict[str, float]]: + """Run accumulated network calculations all at once. + + Returns: + A list of dictionary reports with min, max, average and sum of the + calculation expression. See Emme help 'Network calculator' for more. + """ + reports = self._network_calc(self._specs, self._scenario) + self._specs = [] + return reports + + @staticmethod + def _format_spec( + result: str, + expression: str, + selections: Union[str, Dict[str, str]], + aggregation: Dict[str, str], + ) -> EmmeNetworkCalcSpecification: + spec = { + "result": result, + "expression": expression, + "aggregation": aggregation, + "type": "NETWORK_CALCULATION", + } + if selections is not None: + if isinstance(selections, str): + selections = {"link": selections} + spec["selections"] = selections + else: + spec["selections"] = {"link": "all"} + return spec diff --git a/tm2py/examples.py b/tm2py/examples.py new file mode 100644 index 00000000..c772488b --- /dev/null +++ b/tm2py/examples.py @@ -0,0 +1,49 @@ +"""Download and unzip examples for tm2py, used in tests""" + + +import os + +from .tools import download_unzip + +_ROOT_DIR = r".." 
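The NetworkCalculator wrapper defined just above (tm2py/emme/network.py) is typically driven as in the sketch below; the attribute names, expressions, and selections are illustrative and assume they exist in the scenario.

```python
# Minimal sketch, not part of the PR: attribute names and expressions are examples only.
from tm2py.emme.network import NetworkCalculator

net_calc = NetworkCalculator(scenario)  # scenario: an already-open EmmeScenario (assumed)

# single calculation: returns the report dict (min / max / average / sum)
report = net_calc("@free_flow_time", "length * 60 / ul2", selections="mode=c")

# or batch several specs and submit them in one Network calculator call
net_calc.add_calc("@toll_length", "length", selections="@valuetoll_da > 0")
net_calc.add_calc("@maz_flow", "0")  # selections defaults to {"link": "all"}
reports = net_calc.run()
```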
+ +_DEFAULT_EXAMPLE_URL = ( + r"https://mtcdrive.box.com/shared/static/3entr016e9teq2wt46x1os3fjqylfoge.zip" +) +_DEFAULT_EXAMPLE_SUBDIR = r"examples" +_DEFAULT_EXAMPLE_NAME = "UnionCity" + + +def get_example( + example_name: str = _DEFAULT_EXAMPLE_NAME, + example_subdir: str = _DEFAULT_EXAMPLE_SUBDIR, + root_dir: str = _ROOT_DIR, + retrieval_url: str = _DEFAULT_EXAMPLE_URL, +) -> str: + """Returns example directory; downloads if necessary from retrieval URL. + + Args: + example_name (str, optional): Used to retrieve sub-folder or create it if doesn't exist. + Defaults to _DEFAULT_EXAMPLE_NAME. + example_subdir (str, optional): Where to find examples within root dir. Defaults + to _DEFAULT_EXAMPLE_SUBDIR. + root_dir (str, optional): Root dir of project. Defaults to _ROOT_DIR. + retrieval_url (str, optional): URL to retrieve example data zip from. Defaults + to _DEFAULT_EXAMPLE_URL. + + Raises: + FileNotFoundError: If can't find the files after trying to download it. + + Returns: + str: Path to example data. + """ + _example_dir = os.path.join(root_dir, example_subdir) + _this_example_dir = os.path.join(_example_dir, example_name) + if os.path.isdir(_this_example_dir): + return _this_example_dir + + download_unzip(retrieval_url, _example_dir, _this_example_dir) + if not os.path.isdir(_this_example_dir): + raise FileNotFoundError(f"example {_this_example_dir} not found") + + return _this_example_dir diff --git a/tm2py/logger.py b/tm2py/logger.py new file mode 100644 index 00000000..922b581a --- /dev/null +++ b/tm2py/logger.py @@ -0,0 +1,102 @@ +"""Logging module +""" +from contextlib import contextmanager as _context +from datetime import datetime +import functools + + +class Logger: + """Logger""" + + def __init__(self, controller): + super().__init__() + self._controller = controller + self._indentation = 0 + + @staticmethod + def log(text: str, level: str = "INFO"): + """Placeholder logging method + + Args: + text (str): text to log + level (str): logging level of the message text + """ + if level: + print(text) + + def log_time(self, msg: str, level: str = "INFO", indent: bool = True): + """Log message with timestamp + + Args: + msg (str): message text + level (str): logging level + indent (bool): if true indent any messages based on the number of open contexts + """ + timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S)") + if indent: + indent = " " * self._indentation + self.log(f"{timestamp}: {indent}{msg}", level) + else: + self.log(f"{timestamp}: {msg}", level) + + def log_start(self, msg: str, level: str = "INFO"): + """Log message with timestamp and 'Start'. + + Args: + msg (str): message text + level (str): logging level + """ + self.log_time(f"Start {msg}", level, indent=True) + self._indentation += 1 + + def log_end(self, msg: str, level: str = "INFO"): + """Log message with timestamp and 'End'. + + Args: + msg (str): message text + level (str): logging level + """ + self._indentation -= 1 + self.log_time(f"End {msg}", level, indent=True) + + @_context + def log_start_end(self, msg: str, level: str = "INFO"): + """Use with 'with' statement to log the start and end time with message. + + Args: + msg (str): message text + level (str): logging level + """ + self.log_start(msg, level) + yield + self.log_end(msg, level) + + +# pylint: disable=too-few-public-methods + + +class LogStartEnd: + """Log the start and end time with optional message. + + Used as a Component method decorator. If msg is not provided a default message + is generated with the object class and method name. 
+ + Args: + msg (str): message text to use in the start and end record + level (str): logging level + """ + + def __init__(self, msg: str = None, level: str = "INFO"): + self.msg = msg + self.level = level + + def __call__(self, func): + @functools.wraps(func) + def wrapper(obj, *args, **kwargs): + msg = self.msg or obj.__class__.__name__ + " " + func.__name__ + obj.logger.log_start(msg, self.level) + value = func(obj, *args, **kwargs) + obj.logger.log_end(msg, self.level) + return value + + return wrapper diff --git a/tm2py/template.py b/tm2py/template.py deleted file mode 100644 index dbc5a1ab..00000000 --- a/tm2py/template.py +++ /dev/null @@ -1,71 +0,0 @@ -"""Example Google style docstrings. - -This module demonstrates documentation as specified by the `Google Python -Style Guide`_. Docstrings may extend over multiple lines. Sections are created -with a section header and a colon followed by a block of indented text. - -Example: - Examples can be given using either the ``Example`` or ``Examples`` - sections. Sections support any reStructuredText formatting, including - literal blocks:: - - $ python example_google.py - -Section breaks are created by resuming unindented text. Section breaks -are also implicitly created anytime a new section starts. - -Attributes: - module_level_variable1 (int): Module level variables may be documented in - either the ``Attributes`` section of the module docstring, or in an - inline docstring immediately following the variable. - - Either form is acceptable, but the two should not be mixed. Choose - one convention to document module level variables and be consistent - with it. - -Todo: - * For module TODOs - * You have to also use ``sphinx.ext.todo`` extension - -.. _Google Python Style Guide: - https://google.github.io/styleguide/pyguide.html -""" - -module_level_variable_module_doc = 12345 - -module_level_variable_inline = 98765 -"""int: Module level variable documented inline. - -The docstring may span multiple lines. The type may optionally be specified -on the first line, separated by a colon. -""" - -class TemplateClass: - """The summary line for a class docstring should fit on one line. - - If the class has public attributes, they may be documented here - in an ``Attributes`` section and follow the same formatting as a - function's ``Args`` section. Alternatively, attributes may be documented - inline with the attribute's declaration (see __init__ method below). - - Properties created with the ``@property`` decorator should be documented - in the property's getter method. - - Attributes: - attr1 (str): Description of `attr1`. - attr2 (:obj:`int`, optional): Description of `attr2`. - - """ - def __init__(self,var1: int,var2: str = "default") -> None: - """One-line summary of what method does. - - Args: - var1: Description of var1. - var2: Description of var2. Defaults to "default". 
- """ - pass - - @property - def my_readonly_property(self): - """int: Double of var1.""" - return self.var1*2 \ No newline at end of file diff --git a/tm2py/tools.py b/tm2py/tools.py new file mode 100644 index 00000000..f90eab7e --- /dev/null +++ b/tm2py/tools.py @@ -0,0 +1,115 @@ +"""Tools module for common resources / shared code and "utilities" in the tm2py package.""" +from contextlib import contextmanager as _context +import multiprocessing +import os +import re +import urllib.request +import urllib.error +import urllib.parse +import zipfile + +from typing import Union + + +def parse_num_processors(value: Union[str, int, float]): + """Convert input value (parse if string) to number of processors. + Args: + value: an int, float or string; string value can be "X" or "MAX-X" + Returns: + An int of the number of processors to use + + Raises: + Exception: Input value exceeds number of available processors + Exception: Input value less than 1 processors + """ + max_processors = multiprocessing.cpu_count() + if isinstance(value, str): + result = value.upper() + if result == "MAX": + return max_processors + if re.match("^[0-9]+$", value): + return int(value) + result = re.split(r"^MAX[\s]*-[\s]*", result) + if len(result) == 2: + return max(max_processors - int(result[1]), 1) + raise Exception(f"Input value {value} is an int or string as 'MAX-X'") + + result = int(value) + if result > max_processors: + raise Exception(f"Input value {value} greater than available processors") + if result < 1: + raise Exception(f"Input value {value} less than 1 processors") + return value + + +@_context +def _urlopen(url): + """Access the url, following redirect if needed (i.e. box). + + Wrapper on urllib.request.urlopen. For use with a context manager (with statement). + + Args: + url (str): source URL to access + + Returns: + url.response object + + Raises: + ValueError: HTTP error from urllib + """ + request = urllib.request.Request(url) + # Handle Redirects using solution shown by user: metatoaster on StackOverflow + # https://stackoverflow.com/questions/62384020/python-3-7-urllib-request-doesnt-follow-redirect-url + try: + with urllib.request.urlopen(request) as response: + yield response + except urllib.error.HTTPError as error: + print("redirect error") + if error.status != 307: + raise ValueError(f"HTTP Error {error.status}") from error + redirected_url = urllib.parse.urljoin(url, error.headers["Location"]) + with urllib.request.urlopen(redirected_url) as response: + yield response + + +def _download(url: str, target_destination: str): + """Download file with redirects (i.e. box) + + Args: + url (str): source URL to download data from + target_destination (str): destination file path to save download + """ + with _urlopen(url) as response: + with open(target_destination, "wb") as out_file: + out_file.write(response.read()) + + +def _unzip(target_zip: str, target_dir: str): + """Unzip file at target_zip to directory at target_dir. + + Args: + target_zip: path to existing, valid, zip file + target_dir: path to directory + """ + with zipfile.ZipFile(target_zip, "r") as zip_ref: + zip_ref.extractall(target_dir) + + +def download_unzip( + url: str, out_base_dir: str, target_dir: str, zip_filename: str = "test_data.zip" +) -> None: + """Downloads and unzips a file from a URL. The zip file is removed after extraction. + + Args: + url (str): Full URL do download from. + out_base_dir (str): Where to unzip the file. + target_dir (str): What to unzip the file as. 
zip_filename (str, optional): Filename to store zip file as. Defaults to "test_data.zip". + """ + target_zip = os.path.join(out_base_dir, zip_filename) + if not os.path.isdir(out_base_dir): + os.makedirs(out_base_dir) + _download(url, target_zip) + _unzip(target_zip, target_dir) + os.remove(target_zip)
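Finally, a hedged sketch of how the helpers in tm2py/tools.py fit together; the URL is the Union City test-data link already used by tm2py.examples, and the directory names mirror the defaults there.

```python
# Minimal sketch, not part of the PR: mirrors how tm2py.examples.get_example() is
# expected to use these helpers.
from tm2py.tools import download_unzip, parse_num_processors

# "MAX-1" leaves one core free; plain integers and "MAX" are also accepted
num_processors = parse_num_processors("MAX-1")

# download the zipped example data, extract it, then delete the zip archive
download_unzip(
    "https://mtcdrive.box.com/shared/static/3entr016e9teq2wt46x1os3fjqylfoge.zip",
    out_base_dir="examples",          # created if it does not already exist
    target_dir="examples/UnionCity",  # where the archive contents are extracted
)
```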