diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..a0e751d
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,2 @@
+*.nc filter=lfs diff=lfs merge=lfs -text
+*.csv filter=lfs diff=lfs merge=lfs -text
diff --git a/.gitignore b/.gitignore
index 3652051..484bd40 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,6 +21,7 @@ rvic.egg-info
 # Configuration Files #
 #######################
 *cfg
+samples/cases/*
 
 # Packages #
 ############
diff --git a/.travis.yml b/.travis.yml
index 660863e..5e08005 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,31 +1,35 @@
 language: python
+sudo: false  # use container based build
+notifications:
+  email: false
+
+matrix:
+  fast_finish: true
+  include:
+    - python: 2.7
+    - python: 3.4
+    - python: 3.5
 
-python:
-  - "2.7"
 # Setup anaconda
 before_install:
-  - wget http://repo.continuum.io/miniconda/Miniconda-2.2.2-Linux-x86_64.sh -O miniconda.sh
-  - chmod +x miniconda.sh
-  - ./miniconda.sh -b
-  - export PATH=/home/travis/anaconda/bin:$PATH
-  # Update conda itself
-  - conda update --yes conda
-  # The next couple lines fix a crash with multiprocessing on Travis and are not specific to using Miniconda
-  - sudo rm -rf /dev/shm
-  - sudo ln -s /run/shm /dev/shm
+  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
+      wget http://repo.continuum.io/miniconda/Miniconda-3.16.0-Linux-x86_64.sh -O miniconda.sh;
+    else
+      wget http://repo.continuum.io/miniconda/Miniconda3-3.16.0-Linux-x86_64.sh -O miniconda.sh;
+    fi
+  - bash miniconda.sh -b -p $HOME/miniconda
+  - export PATH="$HOME/miniconda/bin:$PATH"
+  - hash -r
+  - conda config --set always_yes yes --set changeps1 no
+  - conda update -q conda
+  - conda info -a
 
 # Install packages
 install:
-  - conda create --yes --name=rvic_test_env python=$TRAVIS_PYTHON_VERSION --file=ci/requirements-2.7.txt
-  - source activate rvic_test_env
+  - conda create --yes --name=test python=$TRAVIS_PYTHON_VERSION numpy scipy pandas netcdf4 matplotlib pytest
+  - source activate test
+  # - conda install -c https://conda.anaconda.org/ioos cdo
+  # - pip install cdo
   - python setup.py install
 
 # Run test
script:
-  - which python
-  - python --version
-  - conda list
-  - pip freeze
-  - echo $PATH
-  - python -c 'import rvic; print rvic.__file__'
-  - cd tests
-  - pwd
-  - python run_tests.py unit
+  - py.test
diff --git a/ci/requirements-2.7-dev.txt b/ci/requirements-2.7-dev.txt
deleted file mode 100644
index c1220c8..0000000
--- a/ci/requirements-2.7-dev.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-cdo==1.2.3
-matplotlib==1.3.1
-netCDF4==1.0.8
-numpy==1.8.1
-scipy==0.13.3
-pandas==0.13.1
-pytest==2.5.2
diff --git a/ci/requirements-2.7.txt b/ci/requirements-2.7.txt
deleted file mode 100644
index cfe6cda..0000000
--- a/ci/requirements-2.7.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-matplotlib==1.3.1
-netCDF4==1.0.8
-numpy==1.8.1
-scipy==0.13.3
-pandas==0.13.1
-pytest==2.5.2
diff --git a/config/rvic_convert_example.cfg b/config/rvic_convert_example.cfg
deleted file mode 100644
index ffeddd8..0000000
--- a/config/rvic_convert_example.cfg
+++ /dev/null
@@ -1,95 +0,0 @@
-#-- ========================================================================== --#
-#--                                                                            --#
-#-- This RVIC namelist contains options and paths for the                      --#
-#-- development of the RVIC parameter file based on existing                   --#
-#-- UHS files from either the C or Fortran Version of the model.               --#
-#--                                                                            --#
-#-- ========================================================================== --#
-
-# Note: namelist is parsed by the python ConfigParser module. %(Interploation) is
-# supported inside [sections] only.
-
-[OPTIONS]
-#-- ====================================== --#
-#--Level to log output at (char) --#
-# valid values: DEBUG, INFO, WARNING, ERROR, CRITICAL
-LOG_LEVEL:INFO
-
-#--Print output to console in addition to the log file (bool) --#
-# valid values: True, False
-VERBOSE:True
-
-#--case description (char) --#
-CASEID:example_case_name
-
-#--routing domain grid shortname (char) --#
-GRIDID: example_grid_id
-
-#--case run directory (char) --#
-CASE_DIR:/path/to/%(CASEID)s/
-
-#-- Output parameter file format (char) --#
-# Valid Values: NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, and NETCDF4
-# For use with CESM, NETCDF3_CLASSIC is reccomended.
-NETCDF_FORMAT:NETCDF3_CLASSIC
-
-#-- Output parameter file compression options --#
-# Descriptions of these options can be found in
-NETCDF_ZLIB: False
-NETCDF_COMPLEVEL: 4
-NETCDF_SIGFIGS: None
-
-#-- Length of unit hydrograph subset in days (int) --#
-SUBSET_DAYS:10
-
-#-- Constrain the final unit hydrographs sum to be less than or equal to the domain fractions --#
-# True when routing to coastal grid cells, else False
-CONSTRAIN_FRACTIONS:False
-#-- ====================================== --#
-
-[UHS_FILES]
-#-- ====================================== --#
-#-- Routing program UHS files were created in (char) --#
-# Valid Values: C, Fortran
-ROUT_PROGRAM:C
-
-#-- Location of UHS files (char) --#
-ROUT_DIR: /path/to/inputUHS/
-
-#-- Path to stations file (char) --#
-STATION_FILE: %(ROUT_DIR)s/stations.route_c
-#-- ====================================== --#
-
-[ROUTING]
-#-- ====================================== --#
-#-- Output Interval --#
-# Timestep of unit hydrographs.
-OUTPUT_INTERVAL: 86400
-#-- ====================================== --#
-
-[DOMAIN]
-#-- ====================================== --#
-#-- Path to cesm complient domain file, this needs to match the dimensions of
-# the grid the UHS files were developed on (char) --#
-FILE_NAME: /path/to/domain.lnd.nc
-
-#-- netCDF Variable Names --#
-LONGITUDE_VAR: lon
-LATITUDE_VAR: lat
-LAND_MASK_VAR: mask
-FRACTION_VAR: frac
-AREA_VAR: area
-#-- ====================================== --#
-
-[NEW_DOMAIN]
-#-- ====================================== --#
-#-- Path to cesm complient domain file final routing to be done on (char) --#
-FILE_NAME: /path/to/domain.rvic.nc
-
-#-- netCDF Variable Names --#
-LONGITUDE_VAR: lon
-LATITUDE_VAR: lat
-LAND_MASK_VAR: mask
-FRACTION_VAR: frac
-AREA_VAR: area
-#-- ====================================== --#
diff --git a/docs/development/whats_new.md b/docs/development/whats_new.md
new file mode 100644
index 0000000..adba2d5
--- /dev/null
+++ b/docs/development/whats_new.md
@@ -0,0 +1,16 @@
+# What's New
+
+v1.1.0 (25 October 2015)
+
+This release contains a number of bug and compatibility fixes.
+
+### Enhancements
+
+- Compatibility with Python 3.4 and 3.5 ([GH38](https://github.com/UW-Hydro/RVIC/pull/38)).
+- Simplified multiprocessing in `parameters` which should improve stability ([GH60](https://github.com/UW-Hydro/RVIC/pull/60)).
+
+### Bug Fixes
+
+- Fixed bug that caused `REMAP=False` to fail ([GH60](https://github.com/UW-Hydro/RVIC/pull/60)).
+- Improvements were made to the `SEARCH_FOR_CHANNEL` option in `parameters`.
+- Fixed bug where last timestep of history output was not populated ([GH71](https://github.com/UW-Hydro/RVIC/pull/71)).
diff --git a/docs/support/faq.md b/docs/support/faq.md
index 8ac6a32..67705df 100644
--- a/docs/support/faq.md
+++ b/docs/support/faq.md
@@ -4,7 +4,7 @@ At the moment, no.
 However, many of the pieces needed to support this feature are already included in RVIC. This is a feature that has been identified for a future version of RVIC.
 
 ### Can route flows to every grid cell in my domain using RVIC?
-Yes, it is possible to route to every grid cell in the RVIC domain. That said, it may not be the most efficent way to use RVIC. RVIC is a source to sink routing model which means it doesn't track where streamflow is along its flow path. When you route to every grid cell in the domain, you are duplicating a lot of calculations. There are other routing models that route flow from grid cell to grid cell.
+Yes, it is possible to route to every grid cell in the RVIC domain. That said, it may not be the most efficient way to use RVIC. RVIC is a source to sink routing model which means it doesn't track where streamflow is along its flow path. When you route to every grid cell in the domain, you are duplicating a lot of calculations. There are other routing models that route flow from grid cell to grid cell.
 
 ### Is there a C or Fortran Version of RVIC?
-RVIC has been coupled in the Community Earth System Model (CESM) as the streamflow routing model. For that project, a Fortran version of the "convolution" step was written. At this time, there is not a C version of this routing model. In the future, a C binding may be created to coupled with the stand-alone VIC model version 5.
+RVIC has been coupled in the Community Earth System Model (CESM) as the streamflow routing model. For that project, a Fortran version of the "convolution" step was written. Work is currently underway to couple RVIC to the stand-alone image driver in VIC 5.0 ([VIC231](https://github.com/UW-Hydro/VIC/pull/231)).
diff --git a/docs/user-guide/convolution.md b/docs/user-guide/convolution.md
index 7d7a3d1..8349d34 100644
--- a/docs/user-guide/convolution.md
+++ b/docs/user-guide/convolution.md
@@ -28,9 +28,6 @@ The flux file(s) must be in netCDF format and have a `time` dimension as well as
 3. **CASE_DIR**
     - Description: case run directory
     - Type: char
-4. **RVIC_TAG**
-    - Description: RVIC Tag
-    - Type: char
 5. **CASEID**
     - Description: Case ID
     - Type: char
diff --git a/docs/user-guide/install.md b/docs/user-guide/install.md
index 8386c1a..0409470 100644
--- a/docs/user-guide/install.md
+++ b/docs/user-guide/install.md
@@ -1,16 +1,31 @@
 # Installing RVIC
 
 ## Dependencies
-- [Python 2.7](http://www.python.org/)
-- [Numpy](http://www.numpy.org)
-- [Scipy](http://www.scipy.org/)
-- [netcdf4-python](https://code.google.com/p/netcdf4-python/)
+- [Python 2.7 or later, including Python 3](http://www.python.org)
+- [NumPy](http://www.numpy.org)
+- [SciPy](http://www.scipy.org)
+- [netcdf4-python](https://code.google.com/p/netcdf4-python)
+- [pandas](http://pandas.pydata.org)
 
 If using `REMAP=True`:
-- [cdo](https://code.zmaw.de/projects/cdo)
+- [CDO](https://code.zmaw.de/projects/cdo)
 - [cdo.py](https://github.com/Try2Code/cdo-bindings)
 
+## Installing using a package manager
+
+RVIC is available via [PyPI](https://pypi.python.org/pypi/rvic):
+
+```
+pip install rvic
+```
+
+or via Anaconda from the UW-Hydro channel:
+
+```
+conda install --channel https://conda.anaconda.org/UW-Hydro rvic
+```
+
 ## Building RVIC
 
 ### Option 1: Using Anaconda
diff --git a/docs/user-guide/parameters.md b/docs/user-guide/parameters.md
index cc14ff3..ef8526d 100644
--- a/docs/user-guide/parameters.md
+++ b/docs/user-guide/parameters.md
@@ -58,6 +58,8 @@ For irregular grids, you need to come up with a netCDF file that contains these
 - area (in m2 or rad2 or similar)
 - coordinate vars (lon and lat or similar)
 
+*Note: If `REMAP==False`, the resolution of the routing data grid and domain grid must be the same. Furthermore, the domain grid must be a subset of the routing grid. RVIC will raise an error if either of these conditions is not met.*
+
 ## Pour Points File
 A pour points CSV is needed to develop the parameter file. This is simply a csv file containing a list of the pour point locations with 2 or 3 columns (required: lons, lats; optional: names/contributing area/etc). One header row is required.
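Since the pour points format described above is just a headed CSV, a minimal sketch of one is shown below, together with a pandas sanity check; the coordinates and outlet names are hypothetical, and only the `lons` and `lats` columns plus the single header row are required:

```python
import io

import pandas as pd

# Hypothetical pour points CSV: one header row, the required lons/lats
# columns, and an optional names column.
pour_points_csv = io.StringIO(
    'lons,lats,names\n'
    '-122.25,47.65,example_outlet_a\n'
    '-121.90,46.80,example_outlet_b\n'
)

pour_points = pd.read_csv(pour_points_csv)
# Both required columns must be present before the parameters step runs.
assert {'lons', 'lats'}.issubset(pour_points.columns)
print(pour_points)
```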
diff --git a/rvic/__init__.py b/rvic/__init__.py
index e69de29..8f32b37 100644
--- a/rvic/__init__.py
+++ b/rvic/__init__.py
@@ -0,0 +1 @@
+from .version import short_version as __version__
diff --git a/rvic/clib/rvic_convolution.c b/rvic/clib/rvic_convolution.c
index 7be64b5..69cf2b9 100644
--- a/rvic/clib/rvic_convolution.c
+++ b/rvic/clib/rvic_convolution.c
@@ -1,16 +1,17 @@
 #include
 
-void convolve(const int nsources,      /*scalar - number of sources*/
-              const int noutlets,      /*scalar - length of subset*/
-              const int subset_length, /*scalar - length of subset*/
-              const int x_size,
-              const int* source2outlet_ind,  /*1d array - source to outlet mapping*/
-              const int* source_y_ind,       /*1d array - source y location*/
-              const int* source_x_ind,       /*1d array - source x location*/
-              const int* source_time_offset, /*1d array - source time offset*/
-              const double* unit_hydrograph, /*2d array[times][sources] - unit hydrographs*/
-              const double* aggrunin,        /*2d array[ysize][xsize] - vic runoff flux*/
-              double* ring)                  /*2d array[times][outlets] - convolution ring*/
+void
+convolve(const int nsources,      /*scalar - number of sources*/
+         const int noutlets,      /*scalar - length of subset*/
+         const int subset_length, /*scalar - length of subset*/
+         const int x_size,
+         const int *source2outlet_ind,  /*1d array - source to outlet mapping*/
+         const int *source_y_ind,       /*1d array - source y location*/
+         const int *source_x_ind,       /*1d array - source x location*/
+         const int *source_time_offset, /*1d array - source time offset*/
+         const double *unit_hydrograph, /*2d array[times][sources] - unit hydrographs*/
+         const double *aggrunin,        /*2d array[ysize][xsize] - vic runoff flux*/
+         double *ring)                  /*2d array[times][outlets] - convolution ring*/
 {
     int s, i, j;              /*counters*/
     int y, x, offset, outlet; /*2d indicies*/
@@ -18,15 +19,14 @@ void convolve(const int nsources,      /*scalar - number of sources*/
 
     /*Loop through all sources*/
     for (s = 0; s < nsources; s++) {
-
         outlet = source2outlet_ind[s];
         y = source_y_ind[s];
         x = source_x_ind[s];
         offset = source_time_offset[s];
 
-        //1d index location
-        //2d-->1d indexing goes like this: ind = y*x_size + x
-        xyind = y*x_size + x;
+        // 1d index location
+        // 2d-->1d indexing goes like this: ind = y*x_size + x
+        xyind = y * x_size + x;
 
         /* Do the convolution */
         // i is the position in the unit hydrograph
@@ -34,7 +34,7 @@ void convolve(const int nsources,      /*scalar - number of sources*/
         for (i = 0; i < subset_length; i++) {
             j = i + offset;
 
-            //1d index locations
+            // 1d index locations
             rind = j * noutlets + outlet;
             uhind = i * nsources + s;
 
diff --git a/rvic/convert.py b/rvic/convert.py
index 0007619..48ebe60 100644
--- a/rvic/convert.py
+++ b/rvic/convert.py
@@ -1,19 +1,20 @@
-"""
+# -*- coding: utf-8 -*-
+'''
 Read a set of uhs files and write an RVIC parameter file
-"""
+'''
 from logging import getLogger
-from core.log import init_logger, close_logger, LOG_NAME
-from core.utilities import make_directories, copy_inputs, read_domain
-from core.utilities import tar_inputs
-from core.convert import read_station_file, read_uhs_files, move_domain
-from core.param_file import finish_params
-from core.config import read_config
+from .core.log import init_logger, close_logger, LOG_NAME
+from .core.utilities import make_directories, copy_inputs, read_domain
+from .core.utilities import tar_inputs
+from .core.convert import read_station_file, read_uhs_files, move_domain
+from .core.param_file import finish_params
+from .core.config import read_config
 
 
 # -------------------------------------------------------------------- #
 # Top level driver
-def convert(config_file, numofproc):
+def convert(config_file):
 
     # ---------------------------------------------------------------- #
     # Initilize
@@ -29,7 +30,7 @@ def convert(config_file):
     # ---------------------------------------------------------------- #
     # Run
     log.info('getting outlets now')
-    outlets = uhs2param_run(dom_data, outlets, config_dict, directories)
+    outlets = uhs2param_run(dom_data, outlets, config_dict)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
@@ -59,7 +60,8 @@ def uhs2param_init(config_file):
     # copy inputs to $case_dir/inputs and update configuration
     config_dict = copy_inputs(config_file, directories['inputs'])
     options = config_dict['OPTIONS']
-    config_dict['POUR_POINTS'] = {'FILE_NAME': config_dict['UHS_FILES']['STATION_FILE']}
+    config_dict['POUR_POINTS'] = {
+        'FILE_NAME': config_dict['UHS_FILES']['STATION_FILE']}
     config_dict['ROUTING']['FILE_NAME'] = 'unknown'
     config_dict['UH_BOX'] = {'FILE_NAME': 'unknown'}
     # ---------------------------------------------------------------- #
@@ -70,17 +72,16 @@ def uhs2param_init(config_file):
                       options['VERBOSE'])
 
     for direc in directories:
-        log.info('%s directory is %s' % (direc, directories[direc]))
+        log.info('%s directory is %s', direc, directories[direc])
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
     # Read domain file (if applicable)
-    dom_data, DomVats, DomGats = read_domain(config_dict['DOMAIN'])
-    log.info('Opened Domain File: %s' % config_dict['DOMAIN']['FILE_NAME'])
+    dom_data = read_domain(config_dict['DOMAIN'])[0]
+    log.info('Opened Domain File: %s', config_dict['DOMAIN']['FILE_NAME'])
 
     if 'NEW_DOMAIN' in config_dict:
-        new_dom_data, new_DomVats, \
-            new_DomGats = read_domain(config_dict['NEW_DOMAIN'])
+        new_dom_data = read_domain(config_dict['NEW_DOMAIN'])[0]
         log.info('Opened New Domain File: %s',
                  config_dict['NEW_DOMAIN']['FILE_NAME'])
     else:
@@ -99,7 +100,7 @@ def uhs2param_init(config_file):
 
 # -------------------------------------------------------------------- #
 # run
-def uhs2param_run(dom_data, outlets, config_dict, directories):
+def uhs2param_run(dom_data, outlets, config_dict):
 
     # ---------------------------------------------------------------- #
     # Read uhs files
@@ -113,9 +114,9 @@ def uhs2param_run(dom_data, outlets, config_dict):
 
 # -------------------------------------------------------------------- #
 #
 def uhs2param_final(outlets, dom_data, new_dom_data, config_dict, directories):
-    """
+    '''
     Make the RVIC Parameter File
-    """
+    '''
 
     log = getLogger(LOG_NAME)
 
diff --git a/rvic/convolution.py b/rvic/convolution.py
index bcad366..eb656b7 100644
--- a/rvic/convolution.py
+++ b/rvic/convolution.py
@@ -1,4 +1,5 @@
-"""
+# -*- coding: utf-8 -*-
+'''
 This is the convolution routine developed in preparation in coupling RVIC to
 CESM.  Eventually, this will be the offline RVIC model.
 
@@ -12,32 +13,38 @@
 Made necessary changes to run routines to accept the new parameter file
 structure.  Major updates to the...
-"""
+'''
 
 import os
 from collections import OrderedDict
 from logging import getLogger
-from core.log import init_logger, close_logger, LOG_NAME
-from core.utilities import make_directories, read_domain
-from core.utilities import write_rpointer, tar_inputs
-from core.variables import Rvar
-from core.time_utility import Dtime
-from core.read_forcing import DataModel
-from core.history import Tape
-from core.share import NcGlobals, RVIC_TRACERS
-from core.config import read_config
+from .core.log import init_logger, close_logger, LOG_NAME
+from .core.utilities import make_directories, read_domain
+from .core.utilities import write_rpointer, tar_inputs
+from .core.variables import Rvar
+from .core.time_utility import Dtime
+from .core.read_forcing import DataModel
+from .core.history import Tape
+from .core.share import NcGlobals, RVIC_TRACERS
+from .core.config import read_config
+from .core.pycompat import iteritems
 
 
 # -------------------------------------------------------------------- #
 # Top Level Driver
-def convolution(config_file, numofproc=1):
-    """
+def convolution(config_file):
+    '''
     Top level driver for RVIC convolution model.
-    """
+
+    Parameters
+    ----------
+    config_file : str
+        Path to RVIC convolution configuration file.
+    '''
     # ---------------------------------------------------------------- #
     # Initilize
-    hist_tapes, data_model, rout_var, dom_data,\
-        time_handle, directories, config_dict = convolution_init(config_file)
+    hist_tapes, data_model, rout_var, \
+        time_handle, directories = convolution_init(config_file)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
@@ -49,8 +56,7 @@ def convolution(config_file):
     # ---------------------------------------------------------------- #
     # Run
     time_handle, hist_tapes = convolution_run(hist_tapes, data_model, rout_var,
-                                              dom_data, time_handle,
-                                              directories, config_dict)
+                                              time_handle, directories)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
@@ -64,11 +70,42 @@ def convolution(config_file):
 # -------------------------------------------------------------------- #
 # Initialize RVIC
 def convolution_init(config_file):
-    """
-    - Read Grid File
-    - Load the unit hydrograph files (put into point_dict)
-    - Load the initial state file and put it in convolution rings
-    """
+    '''
+    Initialize the RVIC convolution routine
+
+    This function performs these main tasks:
+        - Reads the configuration file
+        - Sets up the RVIC case directories
+        - Copies all input files to the case directory
+        - Initializes the logging
+        - Read domain file
+        - Load the RVIC parameter file
+        - Load the initial state file and put it in convolution rings
+        - Setup time and history file objects
+
+    Parameters
+    ----------
+    config_file : str
+        Path to RVIC convolution configuration file.
+
+    Returns
+    ----------
+    hist_tapes : OrderedDict
+        Ordered dictionary of History objects
+    data_model : DataModel
+        DataModel instance containing the forcings
+    rout_var : Rvar
+        Rvar instance containing the RVIC parameters / unit hydrographs
+    time_handle : Dtime
+        Dtime instance containing information about run length, time
+    directories : dict
+        Dictionary of directories created by this function.
+    '''
 
     # ---------------------------------------------------------------- #
     # Read Configuration files
@@ -82,8 +119,7 @@ def convolution_init(config_file):
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
-    # Copy Inputs to $case_dir/inputs and update configuration
-    #config_dict = copy_inputs(config_file, directories['inputs'])
+    # unpack options
     options = config_dict['OPTIONS']
     # ---------------------------------------------------------------- #
 
@@ -108,8 +144,7 @@ def convolution_init(config_file):
     # ---------------------------------------------------------------- #
     # Read Domain File
     domain = config_dict['DOMAIN']
-    dom_data, dom_vatts, dom_gatts = read_domain(domain,
-                                                 lat0_is_min=data_model.lat0_is_min)
+    dom_data = read_domain(domain, lat0_is_min=data_model.lat0_is_min)[0]
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
@@ -173,19 +208,20 @@ def convolution_init(config_file):
 
     # make sure history file fields are all in list form
     if numtapes == 1:
-        for var, value in history.iteritems():
+        for var, value in iteritems(history):
             if not isinstance(value, list):
                 history[var] = list([value])
 
-    global_atts = NcGlobals(title='RVIC history file',
-                            casename=options['CASEID'],
-                            casestr=options['CASESTR'],
-                            RvicPourPointsFile=os.path.split(rout_var.RvicPourPointsFile)[1],
-                            RvicUHFile=os.path.split(rout_var.RvicUHFile)[1],
-                            RvicFdrFile=os.path.split(rout_var.RvicFdrFile)[1],
-                            RvicDomainFile=os.path.split(domain['FILE_NAME'])[1])
+    global_atts = NcGlobals(
+        title='RVIC history file',
+        casename=options['CASEID'],
+        casestr=options['CASESTR'],
+        RvicPourPointsFile=os.path.split(rout_var.RvicPourPointsFile)[1],
+        RvicUHFile=os.path.split(rout_var.RvicUHFile)[1],
+        RvicFdrFile=os.path.split(rout_var.RvicFdrFile)[1],
+        RvicDomainFile=os.path.split(domain['FILE_NAME'])[1])
 
-    for j in xrange(numtapes):
+    for j in range(numtapes):
         tapename = 'Tape.{0}'.format(j)
         log.info('setting up History %s', tapename)
         hist_tapes[tapename] = Tape(time_handle.time_ord,
@@ -208,23 +244,42 @@ def convolution_init(config_file):
                                     glob_ats=global_atts)
 
     # loop over again and print summary
-    for tapename, tape in hist_tapes.iteritems():
+    for tapename, tape in iteritems(hist_tapes):
         log.info('==========%s==========', tapename)
         log.info(tape)
         tape.write_initial()
     # ---------------------------------------------------------------- #
 
-    return (hist_tapes, data_model, rout_var, dom_data, time_handle,
-            directories, config_dict)
+    return hist_tapes, data_model, rout_var, time_handle, directories
 # -------------------------------------------------------------------- #
 
 
 # -------------------------------------------------------------------- #
-def convolution_run(hist_tapes, data_model, rout_var, dom_data, time_handle,
-                    directories, config_dict):
-    """
+def convolution_run(hist_tapes, data_model, rout_var, time_handle,
+                    directories):
+    '''
     Main run loop for RVIC model.
+
+    Parameters
+    ----------
+    hist_tapes : OrderedDict
+        Ordered dictionary of History objects
+    data_model : DataModel
+        DataModel instance containing the forcings
+    rout_var : Rvar
+        Rvar instance containing the RVIC parameters / unit hydrographs
+    time_handle : Dtime
+        Dtime instance containing information about run length, time
+    directories : dict
+        Dictionary of case directories.
+
+    Returns
+    ----------
+    time_handle : Dtime
+        Dtime instance containing information about run length, time
+    hist_tapes : OrderedDict
+        Ordered dictionary of History objects
+    '''
 
     data2tape = {}
     aggrunin = {}
@@ -275,7 +330,7 @@ def convolution_run(hist_tapes, data_model, rout_var, time_handle,
             data2tape['storage'] = rout_var.get_storage()
 
             # Update the history Tape(s)
-            for tapename, tape in hist_tapes.iteritems():
+            for tapename, tape in iteritems(hist_tapes):
                 log.debug('Updating Tape: %s', tapename)
                 tape.update(data2tape, time_ord)
             # ------------------------------------------------------------ #
@@ -286,7 +341,7 @@ def convolution_run(hist_tapes, data_model, rout_var, time_handle,
             # History files
             history_files = []
             history_restart_files = []
-            for tapename, tape in hist_tapes.iteritems():
+            for tapename, tape in iteritems(hist_tapes):
                 log.debug('Writing Restart File for Tape: %s', tapename)
                 # hist_fname, rest_fname = tape.write_restart()
                 history_files.append(tape.filename)
@@ -304,7 +359,7 @@ def convolution_run(hist_tapes, data_model, rout_var, time_handle,
         if not stop_flag:
             timestamp, time_ord, stop_flag, \
                 rest_flag = time_handle.advance_timestep()
-            #check that we're still inline with convolution
+            # check that we're still inline with convolution
             if end_timestamp != timestamp:
                 raise ValueError('timestamps do not match after convolution')
         else:
@@ -313,7 +368,7 @@ def convolution_run(hist_tapes, data_model, rout_var, time_handle,
 
     # ---------------------------------------------------------------- #
     # Make sure we write out the last history file
-    for tapename, tape in hist_tapes.iteritems():
+    for tapename, tape in iteritems(hist_tapes):
         log.debug('Closing Tape: %s', tapename)
         tape.finish()
     # ---------------------------------------------------------------- #
@@ -324,7 +379,15 @@ def convolution_run(hist_tapes, data_model, rout_var, time_handle,
 # -------------------------------------------------------------------- #
 # Final
 def convolution_final(time_handle, hist_tapes):
-    """ Finalize RVIC Convolution"""
+    '''Finalize RVIC Convolution
+
+    Parameters
+    ----------
+    time_handle : Dtime
+        Dtime instance containing information about run length, time
+    hist_tapes : OrderedDict
+        Ordered dictionary of History objects
+    '''
     # ---------------------------------------------------------------- #
     # Start log
     log = getLogger(LOG_NAME)
@@ -333,13 +396,12 @@ def convolution_final(time_handle, hist_tapes):
 
     # ---------------------------------------------------------------- #
     # Write final log info
-    log.info("-----------------------------------------------------------")
+    log.info('-----------------------------------------------------------')
     log.info('Done with streamflow convolution')
-    log.info('Processed %i timesteps' % time_handle.timesteps)
-    for name, tape in hist_tapes.iteritems():
-        log.info('Wrote %i history files from %s' % (tape.files_count, name))
-    # log.info('Routed to %i points' % time_handle.points)
-    log.info("-----------------------------------------------------------")
+    log.info('Processed %i timesteps', time_handle.timesteps)
+    for name, tape in iteritems(hist_tapes):
+        log.info('Wrote %i history files from %s', tape.files_count, name)
+    log.info('-----------------------------------------------------------')
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
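With `numofproc` removed from the driver signature above, invoking the convolution step from Python reduces to a single call. A minimal sketch (the cfg path below is hypothetical):

```python
from rvic.convolution import convolution

# Hypothetical path; point this at a convolution configuration file
# of the kind described in docs/user-guide/convolution.md.
convolution('/path/to/rvic_convolution_example.cfg')
```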
diff --git a/rvic/core/aggregate.py b/rvic/core/aggregate.py
index a76a755..cc0c069 100644
--- a/rvic/core/aggregate.py
+++ b/rvic/core/aggregate.py
@@ -1,15 +1,15 @@
-#!/usr/local/bin/python
-"""
+# -*- coding: utf-8 -*-
+'''
 aggregate.py
-"""
+'''
 import numpy as np
-from collections import OrderedDict
-from share import FILLVALUE_F
-from utilities import find_nearest, latlon2yx
-from variables import Point
+from .share import FILLVALUE_F
+from .utilities import find_nearest, latlon2yx
+from .variables import Point
 from logging import getLogger
-from log import LOG_NAME
+from .log import LOG_NAME
+from .pycompat import OrderedDict, iteritems, pyzip
 
 
 # -------------------------------------------------------------------- #
 # create logger
@@ -20,9 +20,9 @@
 # -------------------------------------------------------------------- #
 # Find target cells for pour points
 def make_agg_pairs(pour_points, dom_data, fdr_data, config_dict):
-    """
+    '''
     Group pour points by domain grid outlet cell
-    """
+    '''
     lons = pour_points['lons']
     lats = pour_points['lats']
     dom_lon = dom_data[config_dict['DOMAIN']['LONGITUDE_VAR']]
@@ -33,7 +33,7 @@ def make_agg_pairs(pour_points, dom_data, fdr_data, config_dict):
     fdr_srcarea = fdr_data[config_dict['ROUTING']['SOURCE_AREA_VAR']]
 
     # ---------------------------------------------------------------- #
-    #Find Destination grid cells
+    # Find Destination grid cells
     log.info('Finding addresses now...')
 
     routys, routxs = latlon2yx(plats=lats,
                                plons=lons,
@@ -48,9 +48,9 @@ def make_agg_pairs(pour_points, dom_data, fdr_data, config_dict):
 
     # ---------------------------------------------------------------- #
     # Do the aggregation
-    outlets = {}
+    outlets = OrderedDict()
 
-    for i, (lat, lon) in enumerate(zip(lats, lons)):
+    for i, (lat, lon) in enumerate(pyzip(lats, lons)):
         # Define pour point object
         pour_point = Point(lat=lat,
                            lon=lon,
@@ -81,7 +81,7 @@ def make_agg_pairs(pour_points, dom_data, fdr_data, config_dict):
 
     # ---------------------------------------------------------------- #
     # Sort based on outlet total source area pour_point.source_area
-    outlets = OrderedDict(sorted(outlets.items(),
+    outlets = OrderedDict(sorted(list(iteritems(outlets)),
                                  key=lambda t: t[1].upstream_area,
                                  reverse=True))
     # ---------------------------------------------------------------- #
@@ -99,11 +99,11 @@ def make_agg_pairs(pour_points, dom_data, fdr_data, config_dict):
     # ---------------------------------------------------------------- #
     # Print Debug Results
     log.info('\n------------------ SUMMARY OF MakeAggPairs ------------------')
-    log.info('NUMBER OF POUR POINTS IN INPUT LIST: %i' % num)
-    log.info('NUMBER OF POINTS TO AGGREGATE TO: %i' % key_count)
-    log.info('NUMBER OF POUR POINTS AGGREGATED: %i' % pp_count)
-    log.info('EFFECIENCY OF: %.2f %%' % (100.*pp_count / num))
-    log.info('UNASSIGNED POUR POINTS: %i' % (num-pp_count))
+    log.info('NUMBER OF POUR POINTS IN INPUT LIST: %i', num)
+    log.info('NUMBER OF POINTS TO AGGREGATE TO: %i', key_count)
+    log.info('NUMBER OF POUR POINTS AGGREGATED: %i', pp_count)
+    log.info('EFFICIENCY OF: %.2f %%', (100. * pp_count / num))
+    log.info('UNASSIGNED POUR POINTS: %i \n', (num - pp_count))
     log.info('-------------------------------------------------------------\n')
     # ---------------------------------------------------------------- #
 
@@ -114,11 +114,11 @@ def make_agg_pairs(pour_points, dom_data, fdr_data, config_dict):
 # -------------------------------------------------------------------- #
 # Aggregate the UH grids
 def aggregate(in_data, agg_data, res=0, pad=0, maskandnorm=False):
-    """
+    '''
     Add the two data sets together and return the combined arrays.
     Expand the horizontal dimensions as necessary to fit in_data with
     agg_data. The two data sets must include the coordinate variables lon,
     lat, and time.
-    """
+    '''
 
     # ---------------------------------------------------------------- #
     # find range of coordinates
@@ -139,46 +139,52 @@ def aggregate(in_data, agg_data, res=0, pad=0, maskandnorm=False):
 
     # ---------------------------------------------------------------- #
     # make output arrays for lons/lats and initialize fraction/unit_hydrograph
     # pad output arrays so there is a space =pad around inputs
-    lats = np.arange((lat_min-res*pad), (lat_max+res*(pad+1)), res)[::-1]
-    lons = np.arange((lon_min-res*pad), (lon_max+res*(pad+1)), res)
+    lats = np.arange((lat_min - res * pad),
+                     (lat_max + res * (pad + 1)), res)[::-1]
+    lons = np.arange((lon_min - res * pad),
+                     (lon_max + res * (pad + 1)), res)
 
     fraction = np.zeros((len(lats), len(lons)), dtype=np.float64)
     unit_hydrograph = np.zeros((tshape, len(lats), len(lons)),
                                dtype=np.float64)
-    log.debug('fraction shape %s' % str(fraction.shape))
-    log.debug('unit_hydrograph shape %s' % str(unit_hydrograph.shape))
+    log.debug('fraction shape %s', fraction.shape)
+    log.debug('unit_hydrograph shape %s', unit_hydrograph.shape)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
     # find target index locations of all corners for both datasets
     # Not that the lat inds are inverted
     ilat_min_ind = find_nearest(lats, np.max(in_data['lat']))
-    ilat_max_ind = find_nearest(lats, np.min(in_data['lat']))+1
+    ilat_max_ind = find_nearest(lats, np.min(in_data['lat'])) + 1
     ilon_min_ind = find_nearest(lons, np.min(in_data['lon']))
-    ilon_max_ind = find_nearest(lons, np.max(in_data['lon']))+1
+    ilon_max_ind = find_nearest(lons, np.max(in_data['lon'])) + 1
 
-    log.debug('in_data fraction shape: %s',
-              str(in_data['fraction'].shape))
+    log.debug('in_data fraction shape: %s', in_data['fraction'].shape)
 
     if agg_data:
         alat_min_ind = find_nearest(lats, np.max(agg_data['lat']))
-        alat_max_ind = find_nearest(lats, np.min(agg_data['lat']))+1
+        alat_max_ind = find_nearest(lats, np.min(agg_data['lat'])) + 1
         alon_min_ind = find_nearest(lons, np.min(agg_data['lon']))
-        alon_max_ind = find_nearest(lons, np.max(agg_data['lon']))+1
+        alon_max_ind = find_nearest(lons, np.max(agg_data['lon'])) + 1
 
-        log.debug('agg_data fraction shape: %s',
-                  str(agg_data['fraction'].shape))
+        log.debug('agg_data fraction shape: %s', agg_data['fraction'].shape)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
     # Place data
-    fraction[ilat_min_ind:ilat_max_ind, ilon_min_ind:ilon_max_ind] += in_data['fraction']
-    unit_hydrograph[:, ilat_min_ind:ilat_max_ind, ilon_min_ind:ilon_max_ind] += in_data['unit_hydrograph']
+    fraction[ilat_min_ind:ilat_max_ind,
+             ilon_min_ind:ilon_max_ind] += in_data['fraction']
+    unit_hydrograph[:,
+                    ilat_min_ind:ilat_max_ind,
+                    ilon_min_ind:ilon_max_ind] += in_data['unit_hydrograph']
 
     if agg_data:
-        fraction[alat_min_ind:alat_max_ind, alon_min_ind:alon_max_ind] += agg_data['fraction']
-        unit_hydrograph[:, alat_min_ind:alat_max_ind, alon_min_ind:alon_max_ind] += agg_data['unit_hydrograph']
+        fraction[alat_min_ind:alat_max_ind,
+                 alon_min_ind:alon_max_ind] += agg_data['fraction']
+        unit_hydrograph[:, alat_min_ind:alat_max_ind,
+                        alon_min_ind:alon_max_ind] += \
+            agg_data['unit_hydrograph']
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
@@ -199,7 +205,6 @@ def aggregate(in_data, agg_data, res=0, pad=0, maskandnorm=False):
 
     # ---------------------------------------------------------------- #
     # Put all the data into agg_data variable and return to main
-
     agg_data['timesteps'] = in_data['timesteps']
     agg_data['unit_hydrograph_dt'] = in_data['unit_hydrograph_dt']
     agg_data['lon'] = lons
     agg_data['lat'] = lats
diff --git a/rvic/core/config.py b/rvic/core/config.py
index 31cbf99..04fb9c6 100644
--- a/rvic/core/config.py
+++ b/rvic/core/config.py
@@ -1,11 +1,11 @@
-"""
+# -*- coding: utf-8 -*-
+'''
 config.py
-"""
+'''
 
 import os
-from collections import OrderedDict
-from ConfigParser import SafeConfigParser
+from .pycompat import OrderedDict, SafeConfigParser
 
 
 class Config(object):
@@ -28,9 +28,9 @@ class ParametersConfig(Config):
 # -------------------------------------------------------------------- #
 # Read the Configuration File
 def read_config(config_file):
-    """
+    '''
     Return a dictionary with subdictionaries of all configFile options/values
-    """
+    '''
     config = SafeConfigParser()
     config.optionxform = str
     config.read(config_file)
@@ -49,10 +49,10 @@ def read_config(config_file):
 # -------------------------------------------------------------------- #
 # Find the type of the config options
 def config_type(value):
-    """
+    '''
     Parse the type of the configuration file option.
     First see the value is a bool, then try float, finally return a string.
-    """
+    '''
     val_list = [x.strip() for x in value.split(',')]
     if len(val_list) == 1:
         value = val_list[0]
@@ -70,21 +70,21 @@ def config_type(value):
             return os.path.expandvars(value)
         else:
             try:
-                return map(float, val_list)
-            except:
+                return list(map(float, val_list))
+            except (TypeError, ValueError):
                 pass
             try:
-                return map(int, val_list)
-            except:
+                return list(map(int, val_list))
+            except (TypeError, ValueError):
                 return val_list
 # -------------------------------------------------------------------- #
 
 
 # -------------------------------------------------------------------- #
 def isfloat(x):
-    """Test of value is a float"""
+    '''Test if value is a float'''
     try:
-        a = float(x)
+        float(x)
     except ValueError:
         return False
     else:
@@ -94,7 +94,7 @@ def isfloat(x):
 
 # -------------------------------------------------------------------- #
 def isint(x):
-    """Test if value is an integer"""
+    '''Test if value is an integer'''
     try:
         a = float(x)
         b = int(a)
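To illustrate the parsing order preserved above (the `list(map(float, ...))` branch is attempted before the int branch, and the bare `except` clauses are narrowed), a small sketch of how `config_type` treats a couple of hypothetical option values:

```python
from rvic.core.config import config_type

# Comma-separated values become lists; floats are tried first, so
# numeric lists come back as floats.
print(config_type('1, 2, 3'))   # [1.0, 2.0, 3.0]
# Values that parse as neither float nor int fall back to a string list.
print(config_type('lon, lat'))  # ['lon', 'lat']
```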
- """ + ''' val_list = [x.strip() for x in value.split(',')] if len(val_list) == 1: value = val_list[0] @@ -70,21 +70,21 @@ def config_type(value): return os.path.expandvars(value) else: try: - return map(float, val_list) - except: + return list(map(float, val_list)) + except (TypeError, ValueError): pass try: - return map(int, val_list) - except: + return list(map(int, val_list)) + except (TypeError, ValueError): return val_list # -------------------------------------------------------------------- # # -------------------------------------------------------------------- # def isfloat(x): - """Test of value is a float""" + '''Test if value is a float''' try: - a = float(x) + float(x) except ValueError: return False else: @@ -94,7 +94,7 @@ def isfloat(x): # -------------------------------------------------------------------- # def isint(x): - """Test if value is an integer""" + '''Test if value is an integer''' try: a = float(x) b = int(a) diff --git a/rvic/core/convert.py b/rvic/core/convert.py index 04bd4d9..03bf540 100644 --- a/rvic/core/convert.py +++ b/rvic/core/convert.py @@ -1,13 +1,15 @@ -""" +# -*- coding: utf-8 -*- +''' convert.py -""" +''' import os import re import numpy as np import logging -from log import LOG_NAME -from variables import Point -from share import FILLVALUE_I +from .log import LOG_NAME +from .variables import Point +from .share import FILLVALUE_I +from .pycompat import iteritems, pyrange # -------------------------------------------------------------------- # # create logger @@ -18,10 +20,10 @@ # -------------------------------------------------------------------- # # read station file def read_station_file(file_name, dom_data, config_dict): - """ + ''' Read a standard routing station file http://www.hydro.washington.edu/Lettenmaier/Models/VIC/Documentation/Routing/StationLocation.shtml - """ + ''' outlets = {} f = open(file_name, 'r') @@ -35,13 +37,13 @@ def read_station_file(file_name, dom_data, config_dict): uhs_file = f.readline().strip() # move to zero based index - y = int(y)-1 - x = int(x)-1 + y = int(y) - 1 + x = int(x) - 1 # make sure files exist log.info('On station: %s, active: %s', name, active) uhs2_file = os.path.join(config_dict['UHS_FILES']['ROUT_DIR'], - name+'.uh_s2') + name + '.uh_s2') if active == '1': if os.path.isfile(uhs_file): active = True @@ -60,8 +62,10 @@ def read_station_file(file_name, dom_data, config_dict): outlets[i].uhs_file = uhs_file outlets[i].cell_id = i outlets[i].outlet_decomp_ind = dom_data['cell_ids'][y, x] - outlets[i].lon = dom_data[config_dict['DOMAIN']['LONGITUDE_VAR']][y, x] - outlets[i].lat = dom_data[config_dict['DOMAIN']['LATITUDE_VAR']][y, x] + outlets[i].lon = \ + dom_data[config_dict['DOMAIN']['LONGITUDE_VAR']][y, x] + outlets[i].lat = \ + dom_data[config_dict['DOMAIN']['LATITUDE_VAR']][y, x] else: log.info('%s not active... skipping', name) f.close() @@ -72,7 +76,7 @@ def read_station_file(file_name, dom_data, config_dict): # -------------------------------------------------------------------- # # Read uhs files def read_uhs_files(outlets, dom_data, config_dict): - """ + ''' Read a standard routing uh_s file Format: line0: num_sources @@ -80,9 +84,9 @@ def read_uhs_files(outlets, dom_data, config_dict): line2: unit_hydrograph_time_series line1: ... line2: ... 
- """ + ''' if config_dict['UHS_FILES']['ROUT_PROGRAM'] == 'C': - for cell_id, outlet in outlets.iteritems(): + for cell_id, outlet in iteritems(outlets): log.info('Reading outlet %i: %s', cell_id, outlet.name) log.debug(outlet.uhs_file) f = open(outlet.uhs_file, 'r') @@ -98,63 +102,33 @@ def read_uhs_files(outlets, dom_data, config_dict): uh = [] # loop over the source points - for j in xrange(num_sources): + for j in pyrange(num_sources): line = re.sub(' +', ' ', f.readline()) lon, lat, fracs, x, y = line.split() # move to zero based index - y = int(y)-1 - x = int(x)-1 + y = int(y) - 1 + x = int(x) - 1 outlets[cell_id].lon_source[j] = float(lon) outlets[cell_id].lat_source[j] = float(lat) outlets[cell_id].fractions[j] = float(fracs) outlets[cell_id].x_source[j] = int(x) outlets[cell_id].y_source[j] = int(y) line = re.sub(' +', ' ', f.readline()) - uh.append(map(float, line.split())) + uh.append(list(map(float, line.split()))) - outlets[cell_id].unit_hydrograph = np.rot90(np.array(uh, - dtype=np.float64), - k=-1) - outlets[cell_id].source_decomp_ind = dom_data['cell_ids'][outlets[cell_id].y_source, outlets[cell_id].x_source] + outlets[cell_id].unit_hydrograph = np.rot90( + np.array(uh, dtype=np.float64), k=-1) + outlets[cell_id].source_decomp_ind = \ + dom_data['cell_ids'][outlets[cell_id].y_source, + outlets[cell_id].x_source] f.close() - outlets[cell_id].cell_id_source = dom_data['cell_ids'][outlets[cell_id].y_source, outlets[cell_id].x_source] + outlets[cell_id].cell_id_source = \ + dom_data['cell_ids'][outlets[cell_id].y_source, + outlets[cell_id].x_source] elif config_dict['UHS_FILES']['ROUT_PROGRAM'] == 'Fortran': raise ValueError('Fortran conversion not working...') - # log.info('parsing fortran uhs files') - # # setup for finding x, y inds - # dom_lon = dom_data[config_dict['DOMAIN']['LONGITUDE_VAR']] - # dom_lat = dom_data[config_dict['DOMAIN']['LATITUDE_VAR']] - # combined = np.dstack(([dom_lat.ravel(), dom_lon.ravel()]))[0] - # mytree = cKDTree(combined) - - # for cell_id, outlet in outlets.iteritems(): - # # read lons, lats, and unit hydrographs - # log.debug('reading %s' %outlet.ll_file) - # lons, lats = np.genfromtxt(outlet.ll_file, dtype="f8", unpack=True) - - # log.debug('reading %s' %outlet.uhs_file) - # uh = np.genfromtxt(outlet.uhs_file, dtype="f8") - # outlets[cell_id].unit_hydrograph = np.rot90(uh, k=-1) - # if len(lons) != uh.shape[0]: - # raise ValueError('length mismatch in ll file and uhs file %s %s' %(lons.shape, uh.shape)) - - # # now find the y_source and x_sources - # points = list(np.vstack((np.array(lats), np.array(lons))).transpose()) - # dist, indexes = mytree.query(points, k=1) - # yinds, xinds = np.unravel_index(np.array(indexes), dom_lat.shape) - - # # finally, get the fractions and the source_decomp_ind from the domain data - # outlets[cell_id].cell_id_source = dom_data['cell_ids'][yinds, xinds] - # outlets[cell_id].fractions = dom_data[config_dict['DOMAIN']['FRACTION_VAR']][yinds, xinds] - # print 'fractions', outlets[cell_id].fractions - - # # now store all the data - # outlets[cell_id].lat_source = lats - # outlets[cell_id].lon_source = lons - # outlets[cell_id].y_source = yinds - # outlets[cell_id].x_source = xinds else: raise ValueError('UHS_FILES[ROUT_PROGRAM] must be either C or Fortran') @@ -192,7 +166,7 @@ def move_domain(dom_data, new_dom_data, outlets): # ---------------------------------------------------------------- # # Adjust locations - for cell_id, outlet in outlets.iteritems(): + for cell_id, outlet in iteritems(outlets): 
         outlets[cell_id].domy = new_y[outlet.domy]
         outlets[cell_id].domx = new_x[outlet.domx]
 
@@ -200,8 +174,12 @@ def move_domain(dom_data, new_dom_data, outlets):
         outlets[cell_id].y_source = new_y[outlet.y_source]
         outlets[cell_id].x_source = new_x[outlet.x_source]
 
-        outlets[cell_id].outlet_decomp_ind = new_dom_data['cell_ids'][outlets[cell_id].domy, outlets[cell_id].domx]
-        outlets[cell_id].source_decomp_ind = new_dom_data['cell_ids'][outlets[cell_id].y_source, outlets[cell_id].x_source]
+        outlets[cell_id].outlet_decomp_ind = \
+            new_dom_data['cell_ids'][outlets[cell_id].domy,
+                                     outlets[cell_id].domx]
+        outlets[cell_id].source_decomp_ind = \
+            new_dom_data['cell_ids'][outlets[cell_id].y_source,
+                                     outlets[cell_id].x_source]
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
diff --git a/rvic/core/convolution_wrapper.py b/rvic/core/convolution_wrapper.py
index 2f55225..1900c74 100644
--- a/rvic/core/convolution_wrapper.py
+++ b/rvic/core/convolution_wrapper.py
@@ -1,57 +1,44 @@
-"""
+# -*- coding: utf-8 -*-
+'''
 convolution_wrapper.py
 
 ctypes wrapper for rvic_convolution.c
 
 gcc -shared -o rvic_convolution.so rvic_convolution.c
-"""
+'''
 
 import os
+import sysconfig
 import numpy as np
 import ctypes
 
-SHAREDOBJECT = 'rvic_convolution.so'
+SHAREDOBJECT = 'rvic_convolution' + sysconfig.get_config_var('SO')
 LIBPATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))
 
 try:
     _convolution = np.ctypeslib.load_library(SHAREDOBJECT, LIBPATH)
 except ImportError as ie:
-    print('looking for shared object {0} in {1}'.format(SHAREDOBJECT, LIBPATH))
+    print('error looking for shared object {0} in {1}'.format(SHAREDOBJECT,
+                                                               LIBPATH))
     raise ImportError(ie)
 except OSError as oe:
-    print('looking for shared object {0} in {1}'.format(SHAREDOBJECT, LIBPATH))
+    print('error looking for shared object {0} in {1}'.format(SHAREDOBJECT,
+                                                               LIBPATH))
     raise ImportError(oe)
 
-_args = [ctypes.c_int,
-         ctypes.c_int,
-         ctypes.c_int,
-         ctypes.c_int,
-         np.ctypeslib.ndpointer(np.int32),
-         np.ctypeslib.ndpointer(np.int32),
-         np.ctypeslib.ndpointer(np.int32),
-         np.ctypeslib.ndpointer(np.int32),
-         np.ctypeslib.ndpointer(np.float64),
-         np.ctypeslib.ndpointer(np.float64),
-         np.ctypeslib.ndpointer(np.float64)]
+_args = [ctypes.c_int,  # nsources
+         ctypes.c_int,  # noutlets
+         ctypes.c_int,  # subset_length
+         ctypes.c_int,  # xsize
+         np.ctypeslib.ndpointer(np.int32),  # source2outlet_ind
+         np.ctypeslib.ndpointer(np.int32),  # source_y_ind
+         np.ctypeslib.ndpointer(np.int32),  # source_x_ind
+         np.ctypeslib.ndpointer(np.int32),  # source_time_offset
+         np.ctypeslib.ndpointer(np.float64),  # unit_hydrograph
+         np.ctypeslib.ndpointer(np.float64),  # aggrunin
+         np.ctypeslib.ndpointer(np.float64)  # ring
+         ]
 _convolution.convolve.argtypes = _args
 _convolution.convolve.restype = None
 
-
-def rvic_convolve(*args):
-    """args:
-
-    nsources,
-    noutlets,
-    subset_length,
-    xsize,
-    source2outlet_ind,
-    source_y_ind,
-    source_x_ind,
-    source_time_offset,
-    unit_hydrograph,
-    aggrunin,
-    ring
-    """
-    _convolution.convolve(*args)
-
-    return
+rvic_convolve = _convolution.convolve
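Since the Python-side `rvic_convolve` shim is now a direct alias for the C `convolve` symbol, the argument order and dtypes annotated in `_args` above are the whole calling contract. A minimal sketch of exercising it on a toy grid, assuming the shared object has been built (e.g. with the `gcc` command noted in the module docstring); all shapes and values below are hypothetical:

```python
import numpy as np

from rvic.core.convolution_wrapper import rvic_convolve

# Toy problem: two source cells draining to one outlet on a 4x4 grid.
nsources, noutlets, subset_length, x_size = 2, 1, 3, 4

source2outlet_ind = np.zeros(nsources, dtype=np.int32)
source_y_ind = np.array([1, 2], dtype=np.int32)
source_x_ind = np.array([1, 2], dtype=np.int32)
source_time_offset = np.array([0, 1], dtype=np.int32)

# unit_hydrograph is [times][sources]; aggrunin is [ysize][xsize];
# ring is [times][outlets] and must cover subset_length plus the
# largest source time offset.
unit_hydrograph = np.full((subset_length, nsources), 1. / subset_length)
aggrunin = np.ones((4, x_size), dtype=np.float64)
ring = np.zeros((subset_length + 1, noutlets), dtype=np.float64)

rvic_convolve(nsources, noutlets, subset_length, x_size,
              source2outlet_ind, source_y_ind, source_x_ind,
              source_time_offset, unit_hydrograph, aggrunin, ring)
print(ring[:, 0])  # each source's runoff spread over subset_length steps
```

Each source deposits its unit hydrograph into the ring starting at its time offset, which is why the ring needs the extra rows.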
-""" +''' import os import numpy as np from netCDF4 import Dataset, date2num, num2date, stringtochar from datetime import datetime -from time_utility import ord_to_datetime +from .time_utility import ord_to_datetime from logging import getLogger -from log import LOG_NAME -from share import SECSPERDAY, HOURSPERDAY, TIMEUNITS, NC_INT, NC_FLOAT, NC_CHAR -from share import NC_DOUBLE, WATERDENSITY, MONTHSPERYEAR -import share +from .log import LOG_NAME +from .share import SECSPERDAY, HOURSPERDAY, TIMEUNITS +from .share import NC_INT, NC_FLOAT, NC_CHAR +from .share import NC_DOUBLE, WATERDENSITY, MONTHSPERYEAR +from .pycompat import iteritems +from . import share # -------------------------------------------------------------------- # @@ -35,16 +38,16 @@ # -------------------------------------------------------------------- # # RVIC History File Object class Tape(object): - """ History Tape Object""" + ''' History Tape Object''' # ---------------------------------------------------------------- # # Init - def __init__(self, time_ord, caseid, Rvar, tape_num=0, - fincl=['streamflow'], mfilt=1, ndens=2, nhtfrq=0, + def __init__(self, time_ord, caseid, rvar, tape_num=0, + fincl=('streamflow'), mfilt=1, ndens=2, nhtfrq=0, avgflag='A', units='kg m-2 s-1', file_format='NETCDF4_CLASSIC', outtype='grid', grid_lons=False, grid_lats=False, grid_area=None, out_dir='.', - calendar=None, glob_ats=None, zlib=True, complevel=4, + calendar='standard', glob_ats=None, zlib=True, complevel=4, least_significant_digit=None): self._tape_num = tape_num self._time_ord = time_ord # Days since basetime @@ -66,7 +69,7 @@ def __init__(self, time_ord, caseid, Rvar, tape_num=0, self._out_dir = out_dir self._glob_ats = glob_ats - self.__get_rvar(Rvar) # Get the initial Rvar fields + self.__get_rvar(rvar) # Get the initial rvar fields self._grid_shape = grid_area.shape self._out_data = {} @@ -86,7 +89,7 @@ def __init__(self, time_ord, caseid, Rvar, tape_num=0, # ------------------------------------------------------------ # # Get Grid Lons/Lats if outtype is grid - if outtype == 'grid': + if outtype.lower() == 'grid': self._out_data_shape = self._grid_shape if type(grid_lons) == np.ndarray and type(grid_lats) == np.ndarray: self._grid_lons = grid_lons @@ -94,8 +97,10 @@ def __init__(self, time_ord, caseid, Rvar, tape_num=0, else: raise ValueError('Must include grid lons / lats if ' 'outtype == grid') - else: + elif outtype.lower() == 'array': self._out_data_shape = (self._num_outlets, ) + else: + raise ValueError('Unknown value for outtype: {0}'.format(outtype)) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -160,20 +165,29 @@ def __init__(self, time_ord, caseid, Rvar, tape_num=0, # ------------------------------------------------------------ # # Determine the format of the output filename if self._avgflag == 'I': - self._fname_format = os.path.join(out_dir, - "%s.rvic.h%s%s.%%Y-%%m-%%d-%%H-%%M-%%S.nc" % (self._caseid, self._tape_num, self._avgflag.lower())) + self._fname_format = os.path.join( + out_dir, '%s.rvic.h%s%s.%%Y-%%m-%%d-%%H-%%M-%%S.nc' % + (self._caseid, self._tape_num, self._avgflag.lower())) else: if self._nhtfrq == 0: - self._fname_format = os.path.join(out_dir, - "%s.rvic.h%s%s.%%Y-%%m.nc" % (self._caseid, self._tape_num, self._avgflag.lower())) - elif (self._nhtfrq == -24) or (nhtfrq*self._dt == SECSPERDAY): - self._fname_format = os.path.join(out_dir, - "%s.rvic.h%s%s.%%Y-%%m-%%d.nc" % (self._caseid, self._tape_num, 
self._avgflag.lower())) + self._fname_format = os.path.join( + out_dir, + '%s.rvic.h%s%s.%%Y-%%m.nc' % + (self._caseid, self._tape_num, self._avgflag.lower())) + elif (self._nhtfrq == -24) or (nhtfrq * self._dt == SECSPERDAY): + self._fname_format = os.path.join( + out_dir, + '%s.rvic.h%s%s.%%Y-%%m-%%d.nc' % + (self._caseid, self._tape_num, self._avgflag.lower())) else: - self._fname_format = os.path.join(out_dir, - "%s.rvic.h%s%s.%%Y-%%m-%%d-%%H.nc" % (self._caseid, self._tape_num, self._avgflag.lower())) - self._rest_fname_format = os.path.join(out_dir, - "%s.rvic.rh%s.%%Y-%%m-%%d-%%H-%%M-%%S.nc" % (self._caseid, self._tape_num)) + self._fname_format = os.path.join( + out_dir, + '%s.rvic.h%s%s.%%Y-%%m-%%d-%%H.nc' % + (self._caseid, self._tape_num, self._avgflag.lower())) + self._rest_fname_format = os.path.join( + out_dir, + '%s.rvic.rh%s.%%Y-%%m-%%d-%%H-%%M-%%S.nc' % + (self._caseid, self._tape_num)) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -215,7 +229,7 @@ def __repr__(self): # ---------------------------------------------------------------- # # Update the history tapes with new fluxes def update(self, data2tape, time_ord): - """ Update the tape with new data""" + ''' Update the tape with new data''' # ------------------------------------------------------------ # # Check that the time_ord is in sync @@ -240,7 +254,7 @@ def update(self, data2tape, time_ord): # Update the fields for field in self._fincl: tracer = 'LIQ' - log.debug('updating {0}'.format(field)) + log.debug('updating %s', field) fdata = data2tape[field][tracer] if self._avgflag == 'A': self._temp_data[field] += fdata @@ -282,7 +296,7 @@ def write_initial(self): # ---------------------------------------------------------------- # def __next_write_out_data(self): - """determine the maximum size of out_data""" + '''determine the maximum size of out_data''' log.debug('determining size of out_data') @@ -335,7 +349,7 @@ def __next_write_out_data(self): if mfilt < 1: mfilt = 1 - self._out_data_write = mfilt-1 + self._out_data_write = mfilt - 1 self._out_times = np.empty(mfilt, dtype=np.float64) if self._avgflag != 'I': self._out_time_bnds = np.empty((mfilt, 2), dtype=np.float64) @@ -365,10 +379,18 @@ def __update_out_data(self): # Grid the fields self._out_data[field][self._out_data_i, self._outlet_y_ind, - self._outlet_x_ind] = self._temp_data[field][:] * self._units_mult + self._outlet_x_ind] = \ + self._temp_data[field][:] * self._units_mult # ---------------------------------------------------- # else: - self._out_data[field][self._out_data_i, :] = self._temp_data[field] * self._units_mult + self._out_data[field][self._out_data_i, :] = \ + self._temp_data[field] * self._units_mult + + # Check that all values are valid, if not, exit gracefully + if np.isnan(self._out_data[field][self._out_data_i].sum()): + raise ValueError('nan found in output field: {0}, most likely ' + 'there is a nan/missing/fill value in the' + 'input forcings') # ------------------------------------------------------------ # self._out_times[self._out_data_i] = self._write_ord @@ -392,7 +414,7 @@ def __update_out_data(self): # ---------------------------------------------------------------- # def finish(self): - """write out_data""" + '''write out_data''' log.debug('finishing tape %s', self._tape_num) if self._out_data_has_values: if self._outtype == 'grid': @@ -406,7 +428,7 @@ def finish(self): # ---------------------------------------------------------------- # # 
Get import rvar fields def __get_rvar(self, rvar): - """ Get the rvar Fields that are useful for writing output """ + ''' Get the rvar Fields that are useful for writing output ''' self._dt = rvar.unit_hydrograph_dt self._num_outlets = rvar.n_outlets self._outlet_decomp_ind = rvar.outlet_decomp_ind @@ -421,7 +443,7 @@ def __get_rvar(self, rvar): # ---------------------------------------------------------------- # # Determine next write time def __next_update_out_data(self): - """ Determine the count for when the next write should occur """ + ''' Determine the count for when the next write should occur ''' # ------------------------------------------------------------ # # If monthly, write at (YYYY,MM,1,0,0) # b0 is first timestep of next period @@ -453,15 +475,18 @@ def __next_update_out_data(self): # Get next file names and timeord if self._avgflag == 'I': self._write_ord = b1 - self.filename = num2date(b1, TIMEUNITS, - calendar=self._calendar).strftime(self._fname_format) + self.filename = num2date( + b1, TIMEUNITS, + calendar=self._calendar).strftime(self._fname_format) else: self._time_bnds = np.array([[b0, b1]]) self._write_ord = np.average(self._time_bnds) - self.filename = num2date(b0, TIMEUNITS, - calendar=self._calendar).strftime(self._fname_format) - self.rest_filename = num2date(b1, TIMEUNITS, - calendar=self._calendar).strftime(self._fname_format) + self.filename = num2date( + b0, TIMEUNITS, + calendar=self._calendar).strftime(self._fname_format) + self.rest_filename = num2date( + b1, TIMEUNITS, + calendar=self._calendar).strftime(self._fname_format) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -473,7 +498,7 @@ def __next_update_out_data(self): # ---------------------------------------------------------------- # # Average fields def __average(self): - """ Take the average based on the number of accumulated timesteps """ + ''' Take the average based on the number of accumulated timesteps ''' for field in self._fincl: self._temp_data[field] /= self._count # ---------------------------------------------------------------- # @@ -481,7 +506,7 @@ def __average(self): # ---------------------------------------------------------------- # # Write grid style history file def __write_grid(self): - """ Write history file """ + ''' Write history file ''' # ------------------------------------------------------------ # # Open file @@ -490,23 +515,23 @@ def __write_grid(self): # ------------------------------------------------------------ # # Time Variable - time = f.createDimension('time', None) + f.createDimension('time', None) time = f.createVariable('time', self._ncprec, ('time',)) - time[:] = self._out_times[:self._out_data_i+1] - for key, val in share.time.__dict__.iteritems(): + time[:] = self._out_times[:self._out_data_i + 1] + for key, val in iteritems(share.time): if val: - setattr(time, key, val) - time.calendar = self._calendar + setattr(time, key, val.encode()) + time.calendar = self._calendar.encode() if self._avgflag != 'I': - nv = f.createDimension('nv', 2) + f.createDimension('nv', 2) time.bounds = 'time_bnds' time_bnds = f.createVariable('time_bnds', self._ncprec, ('time', 'nv',), **self.ncvaropts) - time_bnds[:, :] = self._out_time_bnds[:self._out_data_i+1] + time_bnds[:, :] = self._out_time_bnds[:self._out_data_i + 1] # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -523,13 +548,13 @@ def 
__write_grid(self): xc[:, :] = self._grid_lons yc[:, :] = self._grid_lats - for key, val in share.xc.__dict__.iteritems(): + for key, val in iteritems(share.xc): if val: - setattr(xc, key, val) + setattr(xc, key, val.encode()) - for key, val in share.yc.__dict__.iteritems(): + for key, val in iteritems(share.yc): if val: - setattr(yc, key, val) + setattr(yc, key, val.encode()) else: coords = ('lat', 'lon',) @@ -544,13 +569,13 @@ def __write_grid(self): lon[:] = self._grid_lons lat[:] = self._grid_lats - for key, val in share.lon.__dict__.iteritems(): + for key, val in iteritems(share.lon): if val: - setattr(lon, key, val) + setattr(lon, key, val.encode()) - for key, val in share.lat.__dict__.iteritems(): + for key, val in iteritems(share.lat): if val: - setattr(lat, key, val) + setattr(lat, key, val.encode()) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -560,31 +585,31 @@ def __write_grid(self): for field in self._fincl: var = f.createVariable(field, self._ncprec, tcoords, **self.ncvaropts) - var[:, :] = self._out_data[field][:self._out_data_i+1] + var[:, :] = self._out_data[field][:self._out_data_i + 1] - for key, val in getattr(share, field).__dict__.iteritems(): + for key, val in iteritems(getattr(share, field)): if val: - setattr(var, key, val) - var.units = self._units + setattr(var, key, val.encode()) + var.units = self._units.encode() if self._grid_lons.ndim > 1: - var.coordinates = " ".join(coords) + var.coordinates = ' '.join(coords).encode() # ------------------------------------------------------------ # # ------------------------------------------------------------ # # write global attributes self._glob_ats.update() - for key, val in self._glob_ats.atts.iteritems(): + for key, val in iteritems(self._glob_ats.atts): if val: - setattr(f, key, val) + setattr(f, key, val.encode()) # ------------------------------------------------------------ # f.close() - log.info('Finished writing %s' % self.filename) + log.info('Finished writing %s', self.filename) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # Write array style history file def __write_array(self): - """ Write history file """ + ''' Write history file ''' # ------------------------------------------------------------ # # Open file @@ -593,35 +618,35 @@ def __write_array(self): # ------------------------------------------------------------ # # Time Variable - time = f.createDimension('time', None) + f.createDimension('time', None) time = f.createVariable('time', self._ncprec, ('time',), **self.ncvaropts) - time[:] = self._out_times[:self._out_data_i+1] - for key, val in share.time.__dict__.iteritems(): + time[:] = self._out_times[:self._out_data_i] + for key, val in iteritems(share.time): if val: - setattr(time, key, val) - time.calendar = self._calendar + setattr(time, key, val.encode()) + time.calendar = self._calendar.encode() if self._avgflag != 'I': - nv = f.createDimension('nv', 2) + f.createDimension('nv', 2) time.bounds = 'time_bnds' time_bnds = f.createVariable('time_bnds', self._ncprec, ('time', 'nv',), **self.ncvaropts) - time_bnds[:, :] = self._out_time_bnds[:self._out_data_i+1] + time_bnds[:, :] = self._out_time_bnds[:self._out_data_i] # ------------------------------------------------------------ # # ------------------------------------------------------------ # # Setup Coordinate Variables coords = ('outlets',) - outlets = 
f.createDimension('outlets', self._num_outlets) + f.createDimension('outlets', self._num_outlets) nocoords = coords + ('nc_chars',) char_names = stringtochar(self._outlet_name) - chars = f.createDimension(nocoords[1], char_names.shape[1]) + f.createDimension(nocoords[1], char_names.shape[1]) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -646,29 +671,29 @@ def __write_array(self): outlet_decomp_ind[:] = self._outlet_decomp_ind onm[:, :] = char_names - for key, val in share.outlet_lon.__dict__.iteritems(): + for key, val in iteritems(share.outlet_lon): if val: - setattr(outlet_lon, key, val) + setattr(outlet_lon, key, val.encode()) - for key, val in share.outlet_lat.__dict__.iteritems(): + for key, val in iteritems(share.outlet_lat): if val: - setattr(outlet_lat, key, val) + setattr(outlet_lat, key, val.encode()) - for key, val in share.outlet_y_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_y_ind): if val: - setattr(outlet_y_ind, key, val) + setattr(outlet_y_ind, key, val.encode()) - for key, val in share.outlet_x_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_x_ind): if val: - setattr(outlet_x_ind, key, val) + setattr(outlet_x_ind, key, val.encode()) - for key, val in share.outlet_decomp_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_decomp_ind): if val: - setattr(outlet_decomp_ind, key, val) + setattr(outlet_decomp_ind, key, val.encode()) - for key, val in share.outlet_name.__dict__.iteritems(): + for key, val in iteritems(share.outlet_name): if val: - setattr(onm, key, val) + setattr(onm, key, val.encode()) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -678,105 +703,23 @@ def __write_array(self): for field in self._fincl: var = f.createVariable(field, self._ncprec, tcoords, **self.ncvaropts) - var[:, :] = self._out_data[field][:self._out_data_i+1] + var[:, :] = self._out_data[field][:self._out_data_i] - for key, val in getattr(share, field).__dict__.iteritems(): + for key, val in iteritems(getattr(share, field)): if val: - setattr(var, key, val) - var.units = self._units + setattr(var, key, val.encode()) + var.units = self._units.encode() # ------------------------------------------------------------ # # ------------------------------------------------------------ # # write global attributes self._glob_ats.update() - for key, val in self._glob_ats.atts.iteritems(): + for key, val in iteritems(self._glob_ats.atts): if val: - setattr(f, key, val) - f.featureType = "timeSeries" + setattr(f, key, val.encode()) + f.featureType = 'timeSeries'.encode() # ------------------------------------------------------------ # f.close() log.info('Finished writing %s', self.filename) # ---------------------------------------------------------------- # - - # ---------------------------------------------------------------- # - # write initial flux - # def write_restart(self): - # """Write history tape state, matches CESM-RVIC format""" - # """dims nx, ny, allrof, string_length, fname_lenp2, fname_len, - # len1, scalar, max_chars, max_nflds, max_flds""" - - # # ------------------------------------------------------------ # - # # Open file - # f = Dataset(self.rest_filename, 'w', self._file_format) - # # ------------------------------------------------------------ # - - # # ------------------------------------------------------------ # - # # Dimensions - # nx = 
f.createDimension('nx', self._grid_shape[1]) - # ny = f.createDimension('ny', self._grid_shape[0]) - # allrof = f.createDimension('allrof', ) - # string_length = f.createDimension('string_length', 8) - # fname_lenp2 = f.createDimension('fname_lenp2', 34) - # fname_len = f.createDimension('fname_len', 32) - # len1 = f.createDimension('len1', 1) - # scalar = f.createDimension('scalar', 1) - # max_chars = f.createDimension('max_chars', 128) - # max_nflds = f.createDimension('max_nflds', 2) - # max_flds = f.createDimension('max_flds', 15) - # # ------------------------------------------------------------ # - - # # ------------------------------------------------------------ # - # # Write Fields - # restvars = OrderedDict() - # restvars['nhtfrq'] = f.createVariable('nhtfrq', NC_INT, ('scalar', )) - # restvars['mfilt'] = f.createVariable('mfilt', NC_INT, ('scalar', )) - # restvars['ncprec'] = f.createVariable('ncprec', NC_INT, ('scalar', )) - # restvars['fincl'] = f.createVariable('fincl', NC_CHAR, ('max_flds', 'fname_lenp2',)) - # restvars['fexcl'] = f.createVariable('fexcl', NC_CHAR, ('max_flds', 'fname_lenp2',)) - # restvars['nflds'] = f.createVariable('nflds', NC_INT, ('scalar', )) - # restvars['ntimes'] = f.createVariable('ntimes', NC_INT, ('scalar', )) - # restvars['is_endhist'] = f.createVariable('is_endhist', NC_INT, ('scalar', )) - # restvars['begtime'] = f.createVariable('begtime', NC_DOUBLE, ('scalar', )) - # restvars['hpindex'] = f.createVariable('hpindex', NC_INT, ('max_nflds', )) - # restvars['avgflag'] = f.createVariable('avgflag', NC_CHAR, ('max_nflds', 'len1',)) - # restvars['name'] = f.createVariable('name', NC_CHAR, ('max_nflds', 'fname_len', )) - # restvars['long_name'] = f.createVariable('long_name', NC_CHAR, ('max_nflds', 'max_chars', )) - # restvars['units'] = f.createVariable('units', NC_CHAR, ('max_nflds', 'max_chars', )) - - # restvars['nhtfrq'][:] = self._nhtfrq - # restvars['mfilt'][:] = self._mfilt - # restvars['ncprec'][:] = self._ndens - # # restvars['fincl'][:, :] = self._fincl - # restvars['fexcl'][:, :] = self._fexcl - # restvars['nflds'][:] = len(self._fincl) - # restvars['ntimes'][:] = 1 - # restvars['is_endhist'][:] = 0 - # restvars['begtime'][:] = self._begtime - # restvars['hpindex'][:] = 'help' - # restvars['avgflag'][:, :] = self._avgflag - # restvars['name'][:, :] = self._fincl - # restvars['long_name'][:, :] = 'help' - # restvars['units'][:, :] = 'help' - - # for name, var in restvas.iteritems(): - # ncvar = getattr(share, name) - # for key, val in ncvar.__dict__.iteritems(): - # if val: - # setattr(var, key, val) - # # ------------------------------------------------------------ # - # # ------------------------------------------------------------ # - - # # ------------------------------------------------------------ # - # # write global attributes - # self._glob_ats.update() - # for key, val in self._glob_ats.atts.iteritems(): - # if val: - # setattr(f, key, val) - # f.title = "RVIC Restart History information, required to continue a simulation" - # f.comment = "This entire file NOT needed for drystart, startup, or branch simulations" - # f.featureType = "timeSeries" - # # ------------------------------------------------------------ # - - # return self.filename, self.rest_filename - # # ---------------------------------------------------------------- # # -------------------------------------------------------------------- # diff --git a/rvic/core/log.py b/rvic/core/log.py index e612f42..4f5f54d 100644 --- a/rvic/core/log.py +++ b/rvic/core/log.py @@ 
-1,6 +1,7 @@ -""" +# -*- coding: utf-8 -*- +''' log.py -""" +''' import os import sys from time import gmtime, strftime @@ -15,10 +16,10 @@ # -------------------------------------------------------------------- # # Fake stream file to handle stdin/stdout class StreamToFile(object): - """ + ''' Fake file-like stream object that redirects writes to a logger instance. http://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/ - """ + ''' def __init__(self, logger_name=LOG_NAME, log_level=logging.INFO): self.logger = logging.getLogger(logger_name) self.log_level = log_level @@ -34,7 +35,7 @@ def flush(self): def init_logger(log_dir='./', log_level='DEBUG', verbose=False): - """ Setup the logger """ + ''' Setup the logger ''' logger = logging.getLogger(LOG_NAME) logger.setLevel(log_level) @@ -43,8 +44,8 @@ def init_logger(log_dir='./', log_level='DEBUG', verbose=False): # ---------------------------------------------------------------- # # create log file handler if log_dir: - log_file = os.path.join(log_dir, 'RVIC-'+strftime("%Y%m%d-%H%M%S", - gmtime())+'.log') + log_file = os.path.join(log_dir, 'RVIC-' + strftime('%Y%m%d-%H%M%S', + gmtime()) + '.log') fh = logging.FileHandler(log_file) fh.setLevel(log_level) fh.setFormatter(FORMATTER) @@ -71,7 +72,7 @@ def init_logger(log_dir='./', log_level='DEBUG', verbose=False): # ---------------------------------------------------------------- # logger.info('-------------------- INITIALIZED RVIC LOG ------------------') - logger.info('LOG LEVEL: %s' % log_level) + logger.info('LOG LEVEL: %s', log_level) logger.info('Logging To Console: %s', verbose) logger.info('LOG FILE: %s', log_file) logger.info('----------------------------------------------------------\n') @@ -81,7 +82,7 @@ def init_logger(log_dir='./', log_level='DEBUG', verbose=False): def close_logger(): - """Close the handlers of the logger""" + '''Close the handlers of the logger''' log = logging.getLogger(LOG_NAME) x = list(log.handlers) for i in x: @@ -90,3 +91,4 @@ def close_logger(): i.close() sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ +# -------------------------------------------------------------------- # diff --git a/rvic/core/make_uh.py b/rvic/core/make_uh.py index e894fc6..aa8ba46 100644 --- a/rvic/core/make_uh.py +++ b/rvic/core/make_uh.py @@ -1,4 +1,5 @@ -""" +# -*- coding: utf-8 -*- +''' make_uh.py PROGRAM rout, Python-Version, written by Joe Hamman winter 2012/2013 @@ -13,14 +14,15 @@ July 2013, Joe Hamman Removed read and write functions to make more modular. 
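# A minimal usage sketch (not part of the patch) for the logging setup in
# rvic/core/log.py above: when console output is off, RVIC rebinds
# sys.stdout/sys.stderr to StreamToFile objects so stray print output still
# lands in the log. The standalone class below mirrors that pattern; the
# logger name 'demo' is illustrative.
import logging
import sys


class StreamToFile(object):
    '''Fake file-like stream that forwards writes to a logger.'''
    def __init__(self, logger_name='demo', log_level=logging.INFO):
        self.logger = logging.getLogger(logger_name)
        self.log_level = log_level

    def write(self, buf):
        # one log record per non-empty line
        for line in buf.rstrip().splitlines():
            self.logger.log(self.log_level, line.rstrip())

    def flush(self):
        pass


logging.basicConfig(level=logging.INFO)
sys.stdout = StreamToFile()
print('routed through the logger')   # emitted as INFO:demo:routed ...
sys.stdout = sys.__stdout__          # restore, as close_logger() does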
Now called from make_parameters.py -""" +''' import numpy as np import logging from scipy.interpolate import interp1d -from utilities import latlon2yx -from share import SECSPERDAY -from log import LOG_NAME +from .utilities import latlon2yx +from .share import SECSPERDAY +from .log import LOG_NAME +from .pycompat import pyzip, pyrange  # -------------------------------------------------------------------- # # create logger @@ -30,10 +32,10 @@  # -------------------------------------------------------------------- # def rout(pour_point, uh_box, fdr_data, fdr_atts, rout_dict): -    """ +    ''' Make the Unit Hydrograph Grid -    """ -    log.info("Starting routing program for point: %s", pour_point) +    ''' +    log.info('Starting routing program for point: %s', pour_point)     # ---------------------------------------------------------------- #     # Unpack a few structures     uh_t = uh_box['time'] @@ -42,20 +44,22 @@ def rout(pour_point, uh_box, fdr_data, fdr_atts, rout_dict):      # ---------------------------------------------------------------- #     # Find Basin Dims and ID -    basin_id = fdr_data[rout_dict['BASIN_ID_VAR']][pour_point.routy, pour_point.routx] +    basin_id = fdr_data[rout_dict['BASIN_ID_VAR']][pour_point.routy, +                                                   pour_point.routx]      log.info('Input Latitude: %f', pour_point.lat)     log.info('Input Longitude: %f', pour_point.lon)     log.info('Global Basin ID: %i', basin_id)  -    y_inds, x_inds = np.nonzero(fdr_data[rout_dict['BASIN_ID_VAR']] == basin_id) +    y_inds, x_inds = \ +        np.nonzero(fdr_data[rout_dict['BASIN_ID_VAR']] == basin_id)      y = np.arange(len(fdr_data[rout_dict['LATITUDE_VAR']]))     x = np.arange(len(fdr_data[rout_dict['LONGITUDE_VAR']]))      x_min = min(x[x_inds]) -    x_max = max(x[x_inds])+1 +    x_max = max(x[x_inds]) + 1     y_min = min(y[y_inds]) -    y_max = max(y[y_inds])+1 +    y_max = max(y[y_inds]) + 1     # ---------------------------------------------------------------- #      # ---------------------------------------------------------------- # @@ -63,9 +67,12 @@ def rout(pour_point, uh_box, fdr_data, fdr_atts, rout_dict):     basin = {}     basin['lat'] = fdr_data[rout_dict['LATITUDE_VAR']][y_min:y_max]     basin['lon'] = fdr_data[rout_dict['LONGITUDE_VAR']][x_min:x_max] -    basin['basin_id'] = fdr_data[rout_dict['BASIN_ID_VAR']][y_min:y_max, x_min:x_max] -    basin['flow_direction'] = fdr_data[rout_dict['FLOW_DIRECTION_VAR']][y_min:y_max, x_min:x_max] -    basin['flow_distance'] = fdr_data[rout_dict['FLOW_DISTANCE_VAR']][y_min:y_max, x_min:x_max] +    basin['basin_id'] = fdr_data[rout_dict['BASIN_ID_VAR']][y_min:y_max, +                                                            x_min:x_max] +    basin['flow_direction'] = \ +        fdr_data[rout_dict['FLOW_DIRECTION_VAR']][y_min:y_max, x_min:x_max] +    basin['flow_distance'] = \ +        fdr_data[rout_dict['FLOW_DISTANCE_VAR']][y_min:y_max, x_min:x_max]     basin['velocity'] = fdr_data['velocity'][y_min:y_max, x_min:x_max]     basin['diffusion'] = fdr_data['diffusion'][y_min:y_max, x_min:x_max] @@ -80,13 +87,17 @@ def rout(pour_point, uh_box, fdr_data, fdr_atts, rout_dict):     # ---------------------------------------------------------------- #     # Create the rout_data Dictionary     rout_data = {'lat': basin['lat'], 'lon': basin['lon'], -                 'dom_x_min': x_min, 'dom_y_min': y_min, -                 'dom_x_max': x_max, 'dom_y_max': y_max} +                 'basiny': pour_point.basiny[0], +                 'basinx': pour_point.basinx[0]}      log.debug('Clipping indices:') -    log.debug('dom_x_min: %s', rout_data['dom_x_min']) -    log.debug('dom_x_max: %s', rout_data['dom_x_max']) -    log.debug('dom_y_min: %s', rout_data['dom_y_min']) -    log.debug('dom_y_max: %s', rout_data['dom_y_max']) +    log.debug('rout_x_min: %s', x_min) +    log.debug('rout_x_max: %s', 
x_max) +    log.debug('rout_y_min: %s', y_min) +    log.debug('rout_y_max: %s', y_max) +    log.debug('pour_point.basiny: %s', pour_point.basiny) +    log.debug('pour_point.basinx: %s', pour_point.basinx) +    # ---------------------------------------------------------------- # +     # ---------------------------------------------------------------- #     # Determine/Set flow direction syntax     # Flow directions {north, northeast, east, southeast, @@ -114,8 +125,8 @@ def rout(pour_point, uh_box, fdr_data, fdr_atts, rout_dict):     # Find timestep (timestep is determined from uh_BOX input file)     input_interval = find_ts(uh_t)     rout_data['unit_hydrograph_dt'] = input_interval -    t_cell = int(rout_dict['CELL_FLOWDAYS']*SECSPERDAY/input_interval) -    t_uh = int(rout_dict['BASIN_FLOWDAYS']*SECSPERDAY/input_interval) +    t_cell = int(rout_dict['CELL_FLOWDAYS'] * SECSPERDAY / input_interval) +    t_uh = int(rout_dict['BASIN_FLOWDAYS'] * SECSPERDAY / input_interval)     # ---------------------------------------------------------------- #      # ---------------------------------------------------------------- # @@ -151,12 +162,9 @@      # ---------------------------------------------------------------- #     # Aggregate to output timestep -    rout_data['unit_hydrograph'], \ -        rout_data['timesteps'] = adjust_uh_timestep(uh_s, t_uh, -                                                    input_interval, -                                                    rout_dict['OUTPUT_INTERVAL'], -                                                    catchment['x_inds'], -                                                    catchment['y_inds']) +    rout_data['unit_hydrograph'] = adjust_uh_timestep( +        uh_s, t_uh, input_interval, rout_dict['OUTPUT_INTERVAL'], +        catchment['x_inds'], catchment['y_inds'])     # ---------------------------------------------------------------- #     return rout_data # -------------------------------------------------------------------- # @@ -165,10 +173,10 @@ def rout(pour_point, uh_box, fdr_data, fdr_atts, rout_dict): # -------------------------------------------------------------------- # # Find the Timestep from the uh box def find_ts(uh_t): -    """ +    '''     Determines the (input_interval) based on the timestep given in uhfile -    """ -    input_interval = uh_t[1]-uh_t[0] +    ''' +    input_interval = uh_t[1] - uh_t[0]     log.debug('Input Timestep = %i seconds', input_interval)     return input_interval # -------------------------------------------------------------------- # @@ -177,17 +185,17 @@ def find_ts(uh_t): # -------------------------------------------------------------------- # # Read the flow direction file def read_direction(fdr, dy, dx): -    """ +    '''     Reads the direction file and makes two grids (to_x) and (to_y).     The input grids follow the 1-8 or 1-128 grid directions as shown below.     val = direction  [to_y][to_x] -    """ +    '''     log.debug('Reading direction input and finding target row/columns')      to_y = np.zeros_like(fdr, dtype=np.int16)     to_x = np.zeros_like(fdr, dtype=np.int16)  -    valid_dirs = dy.keys() +    valid_dirs = list(dy.keys())      for (y, x), d in np.ndenumerate(fdr):         if d in valid_dirs: @@ -204,7 +212,7 @@ def read_direction(fdr, dy, dx): # -------------------------------------------------------------------- # # Search the catchment def search_catchment(to_y, to_x, pour_point, basin_ids, basin_id): -    """ +    '''     Find all cells upstream of the pour point.  Return a dictionary with     x_inds, y_inds, and the count of cells to the downstream pour point.  All     are sorted by the latter.  For each x, y pair, the flow path is followed     until either the pour point is encountered     (if (yy==pour_point.basiny and xx==pour_point.basinx):) or the flowpath     leads outside of the grid. 
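# A hedged sketch (not from the patch) of the read_direction /
# search_catchment machinery above: build downstream-target grids to_y/to_x
# from a flow-direction grid, then follow one cell's flow path until it
# reaches the pour point. The 1-8 direction table (1=N, 2=NE, ..., 8=NW) is
# an assumption for illustration; RVIC takes dy/dx from the flow-direction
# file's metadata.
import numpy as np

dy = {1: -1, 2: -1, 3: 0, 4: 1, 5: 1, 6: 1, 7: 0, 8: -1}
dx = {1: 0, 2: 1, 3: 1, 4: 1, 5: 0, 6: -1, 7: -1, 8: -1}

# toy 3x3 basin in which every cell drains toward the center (1, 1)
fdr = np.array([[4, 5, 6],
                [3, 5, 7],
                [2, 1, 8]])

to_y = np.zeros_like(fdr, dtype=np.int16)
to_x = np.zeros_like(fdr, dtype=np.int16)
for (y, x), d in np.ndenumerate(fdr):
    if d in dy:
        to_y[y, x] = y + dy[d]   # row of the downstream neighbor
        to_x[y, x] = x + dx[d]   # column of the downstream neighbor

# follow the flow path from the corner cell to the pour point
yy, xx, path = 0, 0, [(0, 0)]
while (yy, xx) != (1, 1):
    yy, xx = int(to_y[yy, xx]), int(to_x[yy, xx])
    path.append((yy, xx))
print(path)   # [(0, 0), (1, 1)]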
*** Does not handle wrapped coordinates. *** - """ + ''' log.debug('Searching catchment') (len_y, len_x) = to_x.shape @@ -240,11 +248,11 @@ def search_catchment(to_y, to_x, pour_point, basin_ids, basin_id): cells = 0 - for yy, xx in zip(byinds, bxinds): + for yy, xx in pyzip(byinds, bxinds): if in_catch[yy, xx] >= 0: # set the old path to zero - pathy[:cells+1] = 0 - pathx[:cells+1] = 0 + pathy[:cells + 1] = 0 + pathx[:cells + 1] = 0 # reset the cells counter cells = 0 @@ -260,14 +268,14 @@ def search_catchment(to_y, to_x, pour_point, basin_ids, basin_id): or in_catch[yy, xx] == 1: # get the path inds - py = pathy[:cells+1] - px = pathx[:cells+1] + py = pathy[:cells + 1] + px = pathx[:cells + 1] # Add path to in_catch and count_ds in_catch[py, px] = 1 # reverse the path count - temp_path = path_count[:cells+1][::-1] + temp_path = path_count[:cells + 1][::-1] count_ds[py, px] = (count_ds[yy, xx] + temp_path) break @@ -289,10 +297,9 @@ def search_catchment(to_y, to_x, pour_point, basin_ids, basin_id): catch_fracs[cyinds, cxinds] = 1.0 count = len(cyinds) - tempy, tempx = np.nonzero(count_ds) - log.debug("Found %i upstream grid cells from present station", count) - log.debug("Expected at most %i upstream grid cells from present station", + log.debug('Found %i upstream grid cells from present station', count) + log.debug('Expected at most %i upstream grid cells from present station', bsize) if count > bsize: @@ -313,25 +320,25 @@ def search_catchment(to_y, to_x, pour_point, basin_ids, basin_id): # -------------------------------------------------------------------- # # Make uh Grid def make_uh(dt, t_cell, y_inds, x_inds, velocity, diffusion, xmask): - """ + ''' Calculate the impulse response function for grid cells using equation 15 from Lohmann, et al. (1996) Tellus article. Return 3d uh grid. - """ + ''' log.debug('Making uh for each cell') uh = np.zeros((t_cell, xmask.shape[0], xmask.shape[1]), dtype=np.float64) - time = np.arange(dt, t_cell*dt+dt, dt, dtype=np.float64) + time = np.arange(dt, t_cell * dt + dt, dt, dtype=np.float64) - for y, x in zip(y_inds, x_inds): + for y, x in pyzip(y_inds, x_inds): xm = xmask[y, x] v = velocity[y, x] d = diffusion[y, x] - exponent = -1*np.power(v*time-xm, 2)/(4*d*time) - green = xm/(2*time*np.sqrt(np.pi*time*d))*np.exp(exponent) + exponent = -1 * np.power(v * time - xm, 2) / (4 * d * time) + green = xm / (2 * time * np.sqrt(np.pi * time * d)) * np.exp(exponent) # Normalize - uh[:, y, x] = green/green.sum() + uh[:, y, x] = green / green.sum() return uh # -------------------------------------------------------------------- # @@ -340,17 +347,17 @@ def make_uh(dt, t_cell, y_inds, x_inds, velocity, diffusion, xmask): # Make uh river def make_grid_uh_river(t_uh, t_cell, uh, to_y, to_x, pour_point, y_inds, x_inds, count_ds): - """ + ''' Calculate impulse response function for river routing. Starts at downstream point incrementally moves upstream. - """ - log.debug("Making uh_river grid....") + ''' + log.debug('Making uh_river grid....') y_ind = pour_point.basiny x_ind = pour_point.basinx uh_river = np.zeros((t_uh, uh.shape[1], uh.shape[2]), dtype=np.float64) - for (y, x, d) in zip(y_inds, x_inds, count_ds): + for (y, x, d) in pyzip(y_inds, x_inds, count_ds): if d > 0: yy = to_y[y, x] xx = to_x[y, x] @@ -373,22 +380,23 @@ def make_grid_uh_river(t_uh, t_cell, uh, to_y, to_x, pour_point, y_inds, # Make grid uh def make_grid_uh(t_uh, t_cell, uh_river, uh_box, to_y, to_x, y_inds, x_inds, count_ds): - """ + ''' Combines the uh_box with downstream cell uh_river. 
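# Standalone sketch of the per-cell impulse response computed in make_uh()
# above, i.e. eq. 15 of Lohmann et al. (1996):
#     h(t) = x / (2 t sqrt(pi t D)) * exp(-(v t - x)^2 / (4 D t))
# normalized so each cell's unit hydrograph sums to one. The parameter
# values below are illustrative only.
import numpy as np

dt = 3600.0     # timestep [s]
t_cell = 48     # length of the cell impulse response [timesteps]
xm = 5000.0     # flow distance across the cell [m]
v = 1.0         # velocity parameter [m s-1]
d = 2000.0      # diffusion parameter [m2 s-1]

time = np.arange(dt, t_cell * dt + dt, dt, dtype=np.float64)
exponent = -1 * np.power(v * time - xm, 2) / (4 * d * time)
green = xm / (2 * time * np.sqrt(np.pi * time * d)) * np.exp(exponent)
uh_cell = green / green.sum()   # normalized unit hydrograph for one cell
assert np.isclose(uh_cell.sum(), 1.0)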
Cell [0] is given the uh_box without river routing -    """ -    log.debug("Making unit_hydrograph grid") +    ''' +    log.debug('Making unit_hydrograph grid')      unit_hydrograph = np.zeros((t_uh, uh_river.shape[1], uh_river.shape[2])) -    irf_temp = np.zeros(t_uh+t_cell, dtype=np.float64) +    irf_temp = np.zeros(t_uh + t_cell, dtype=np.float64)  -    for (y, x, d) in zip(y_inds, x_inds, count_ds): +    for (y, x, d) in pyzip(y_inds, x_inds, count_ds):         irf_temp[:] = 0.0         if d > 0:             yy = to_y[y, x]             xx = to_x[y, x]             irf_temp = np.convolve(uh_box, uh_river[:, yy, xx]) -            unit_hydrograph[:, y, x] = irf_temp[:t_uh]/np.sum(irf_temp[:t_uh]) +            unit_hydrograph[:, y, x] = \ +                irf_temp[:t_uh] / np.sum(irf_temp[:t_uh])         else:             irf_temp[:len(uh_box)] = uh_box[:]             unit_hydrograph[:, y, x] = irf_temp[:t_uh] @@ -401,28 +409,27 @@ def make_grid_uh(t_uh, t_cell, uh_river, uh_box, to_y, to_x, y_inds, x_inds,  # Adjust the timestep def adjust_uh_timestep(unit_hydrograph, t_uh, input_interval, output_interval,                        x_inds, y_inds): -    """ +    '''     Aggregates to timestep (output_interval). output_interval must be a     multiple of input_interval.  This function is not setup to disaggregate     the Unit Hydrographs to a output_interval < input_interval.     ''' [...] diff --git a/rvic/core/param_file.py b/rvic/core/param_file.py [...] +    if not len(outlets) > 0: +        raise ValueError('outlets in finish_params are empty') +     # ------------------------------------------------------------ #     # netCDF variable options     ncvaropts = {} @@ -44,7 +49,8 @@ def finish_params(outlets, dom_data, config_dict, directories):     # subset (shorten time base)     if options['SUBSET_DAYS'] and \             options['SUBSET_DAYS'] < routing['BASIN_FLOWDAYS']: -        subset_length = options['SUBSET_DAYS']*SECSPERDAY/routing['OUTPUT_INTERVAL'] +        subset_length = (options['SUBSET_DAYS'] * +                         SECSPERDAY / routing['OUTPUT_INTERVAL'])         outlets, full_time_length, \             before, after = subset(outlets, subset_length=subset_length) @@ -65,7 +71,7 @@ def finish_params(outlets, dom_data, config_dict, directories):     else:         log.info('Not subsetting because either SUBSET_DAYS is null or '                  'SUBSET_DAYS < BASIN_FLOWDAYS') [...]     yi, xi = np.nonzero(fractions > dom_fractions)         log.info('Adjust fractions for %s grid cells', len(yi)) -        ratio_fraction[yi, xi] = dom_fractions[yi, xi]/fractions[yi, xi] +        ratio_fraction[yi, xi] = dom_fractions[yi, xi] / fractions[yi, xi]     # ---------------------------------------------------------------- #      # ---------------------------------------------------------------- #     # Adjust fracs based on ratio_fraction -    for cell_id, outlet in outlets.iteritems(): +    for key, outlet in iteritems(outlets):         y = outlet.y_source         x = outlet.x_source          if adjust: -            outlets[cell_id].fractions *= ratio_fraction[y, x] +            outlet.fractions *= ratio_fraction[y, x]          # For Diagnostics only -        adjusted_fractions[y, x] += outlets[cell_id].fractions -        sum_uh_fractions[y, x] += outlets[cell_id].unit_hydrograph.sum(axis=0) +        adjusted_fractions[y, x] += outlet.fractions +        sum_uh_fractions[y, x] += outlet.unit_hydrograph.sum(axis=0)     # ---------------------------------------------------------------- #      # ---------------------------------------------------------------- # @@ -290,32 +303,33 @@ def adjust_fractions(outlets, dom_fractions, adjust=True): # -------------------------------------------------------------------- # # Shorten the unit hydrograph def subset(outlets, subset_length=None): -    """ Shorten the Unit Hydrograph""" +    ''' Shorten the Unit Hydrograph'''     log.info('subsetting unit-hydrographs now...')     log.debug('Subset Length: %s', subset_length) - -    for i, (cell_id, outlet) in enumerate(outlets.iteritems()): +    log.debug(outlets) +    for i, (key, outlet) in enumerate(iteritems(outlets)):         if i == 0:             full_time_length = 
outlet.unit_hydrograph.shape[0]             log.debug('Subset Length: %s', subset_length) +            log.debug('full_time_length: %s', full_time_length)             if not subset_length:                 subset_length = full_time_length                 log.debug('No subset_length provided, using full_time_length') -            before = outlets[cell_id].unit_hydrograph +            before = outlet.unit_hydrograph         else: -            before = np.append(before, outlets[cell_id].unit_hydrograph, +            before = np.append(before, outlet.unit_hydrograph,                                axis=1)  -        outlets[cell_id].offset = np.empty(outlet.unit_hydrograph.shape[1], -                                           dtype=np.int16) +        outlet.offset = np.empty(outlet.unit_hydrograph.shape[1], +                                 dtype=np.int16)         out_uh = np.zeros((subset_length, outlet.unit_hydrograph.shape[1]),                           dtype=np.float64)  -        d_left = -1*subset_length/2 -        d_right = subset_length/2 +        d_left = -1 * subset_length // 2 +        d_right = subset_length // 2  -        for j in xrange(outlet.unit_hydrograph.shape[1]): +        for j in pyrange(outlet.unit_hydrograph.shape[1]):             # find index position of maximum             maxind = np.argmax(outlet.unit_hydrograph[:, j]) @@ -345,18 +359,18 @@ def subset(outlets, subset_length=None):                 raise ValueError('Subsetting failed left:{0} or right {1} does'                                  ' not fit inside bounds'.format(left, right))  -            outlets[cell_id].offset[j] = left +            outlet.offset[j] = left              # clip and normalize             tot = outlet.unit_hydrograph[left:right, j].sum()             out_uh[:, j] = outlet.unit_hydrograph[left:right, j] / tot  -        outlets[cell_id].unit_hydrograph = out_uh +        outlet.unit_hydrograph = out_uh          if i == 0: -            after = outlets[cell_id].unit_hydrograph +            after = outlet.unit_hydrograph         else: -            after = np.append(after, outlets[cell_id].unit_hydrograph, axis=1) +            after = np.append(after, outlet.unit_hydrograph, axis=1)      log.info('Done subsetting') @@ -366,13 +380,13 @@ def subset(outlets, subset_length=None):  # -------------------------------------------------------------------- #  def group(outlets, subset_length): -    """ +    '''     group the outlets into one set of arrays -    """ +    '''     n_outlets = len(outlets)     n_sources = 0 -    for outlet in outlets.itervalues(): +    for key, outlet in iteritems(outlets):         n_sources += len(outlet.y_source)      gd = {} @@ -403,7 +417,7 @@ def group(outlets, subset_length):     gd['outlet_y_ind'] = np.empty(n_outlets, dtype=np.int16)     gd['outlet_decomp_ind'] = np.empty(n_outlets, dtype=np.int16)     gd['outlet_number'] = np.empty(n_outlets, dtype=np.int16) -    gd['outlet_name'] = np.empty(n_outlets, dtype="S{0}".format(MAX_NC_CHARS)) +    gd['outlet_name'] = np.empty(n_outlets, dtype='S{0}'.format(MAX_NC_CHARS))     gd['outlet_upstream_gridcells'] = np.empty(n_outlets, dtype=np.int16)     gd['outlet_upstream_area'] = np.empty(n_outlets, dtype=np.float64)     # ---------------------------------------------------------------- #      # ---------------------------------------------------------------- #     # place outlet and source vars into gd dictionary     a = 0 -    for i, (cell_id, outlet) in enumerate(outlets.iteritems()): +    for i, (key, outlet) in enumerate(iteritems(outlets)):         b = a + len(outlet.y_source) -        log.debug('outlet.unit_hydrograph.shape %s', +        log.debug('%s unit_hydrograph.shape %s', outlet.name,                   outlet.unit_hydrograph.shape)         # -------------------------------------------------------- #         # Point specific values @@ -434,7 +448,7 @@ def group(outlets, subset_length):         gd['outlet_lat'][i] = outlet.lat         gd['outlet_x_ind'][i] = outlet.domx         gd['outlet_y_ind'][i] = outlet.domy -        gd['outlet_decomp_ind'][i] = cell_id +        gd['outlet_decomp_ind'][i] = outlet.cell_id         gd['outlet_number'][i] = i         gd['outlet_name'][i] = outlet.name 
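# A hedged sketch (not from the patch) of the clipping logic in subset()
# above: window each unit hydrograph around its peak, remember the left
# offset (later written out as source_time_offset), and renormalize. The toy
# hydrograph below stands in for outlet.unit_hydrograph[:, j]; the bounds
# shifting of the real function is omitted.
import numpy as np

uh = np.exp(-0.5 * ((np.arange(100) - 40.0) / 8.0) ** 2)   # toy hydrograph
uh /= uh.sum()
subset_length = 20

maxind = np.argmax(uh)
left = maxind - subset_length // 2    # // keeps the indices integers
right = left + subset_length
offset = left                         # stored per source as the time offset
clipped = uh[left:right]
clipped = clipped / clipped.sum()     # renormalize after clipping
assert clipped.shape == (subset_length,)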
gd['outlet_upstream_gridcells'][i] = outlet.upstream_gridcells diff --git a/rvic/core/plots.py b/rvic/core/plots.py index b3a64cd..e5df0ec 100644 --- a/rvic/core/plots.py +++ b/rvic/core/plots.py @@ -1,9 +1,10 @@ -""" +# -*- coding: utf-8 -*- +''' plots.py -""" +''' import os import logging -from log import LOG_NAME +from .log import LOG_NAME import numpy as np from datetime import date try: @@ -14,9 +15,9 @@ try: from mpl_toolkits.basemap import Basemap basemap_available = False - except: + except ImportError: basemap_available = False -except: +except ImportError: matplotlib_available = False @@ -28,14 +29,10 @@ # -------------------------------------------------------------------- # def uhs(data, title, case_id, plot_dir): - """ + ''' Plot diagnostic plot showing all unit hydrographs - """ - today = date.today().strftime('%Y%m%d') - file_name = "{0}_{1}_{2}.png".format(title.lower().replace(" ", "_"), - case_id.lower().replace(" ", "_"), - today) - pfname = os.path.join(plot_dir, file_name) + ''' + pfname = _make_filename(title, case_id, plot_dir) fig = plt.figure() plt.plot(data) @@ -50,22 +47,18 @@ def uhs(data, title, case_id, plot_dir): # -------------------------------------------------------------------- # def _fractions_grid(data, dom_x, dom_y, title, case_id, plot_dir): - """ + ''' Plot diagnostic plots of fraction variables - """ + ''' # ---------------------------------------------------------------- # # Plot Fractions - today = date.today().strftime('%Y%m%d') - file_name = "{0}_{1}_{2}.png".format(title.lower().replace(" ", "_"), - case_id.lower().replace(" ", "_"), - today) - pfname = os.path.join(plot_dir, file_name) + pfname = _make_filename(title, case_id, plot_dir) mask = data <= 0.0 data = np.ma.array(data, mask=mask) cmap = matplotlib.cm.cool - cmap.set_bad(color=u'w') + cmap.set_bad(color='w') fig = plt.figure() plt.pcolormesh(data, cmap=cmap) @@ -75,7 +68,8 @@ def _fractions_grid(data, dom_x, dom_y, title, case_id, plot_dir): plt.title(title) plt.xlabel('x') plt.ylabel('y') - # plt.gca().invert_yaxis() + plt.ylim([0, dom_y.shape[0]]) + plt.xlim([0, dom_x.shape[1]]) fig.savefig(pfname) # ---------------------------------------------------------------- # return pfname @@ -84,19 +78,15 @@ def _fractions_grid(data, dom_x, dom_y, title, case_id, plot_dir): # -------------------------------------------------------------------- # def _fractions_map(data, dom_x, dom_y, title, case_id, plot_dir): - """ + ''' Plot diagnostic plots of fraction variables using Basemap - """ + ''' # ---------------------------------------------------------------- # # Plot Fractions - today = date.today().strftime('%Y%m%d') - file_name = "{0}_{1}_{2}.png".format(title.lower().replace(" ", "_"), - case_id.lower().replace(" ", "_"), - today) - pfname = os.path.join(plot_dir, file_name) + pfname = _make_filename(title, case_id, plot_dir) fig = plt.figure(figsize=(8, 8)) - ax = fig.add_axes([0.1, 0.1, 0.8, 0.8]) + fig.add_axes([0.1, 0.1, 0.8, 0.8]) dom_x[dom_x < 0] += 360.0 @@ -104,11 +94,11 @@ def _fractions_map(data, dom_x, dom_y, title, case_id, plot_dir): data = np.ma.array(data, mask=mask) cmap = matplotlib.cm.cool - cmap.set_bad(color=u'w') + cmap.set_bad(color='w') # define projection - midx = int(dom_x.shape[1]/2) - midy = int(dom_x.shape[0]/2) + midx = int(dom_x.shape[1] / 2) + midy = int(dom_x.shape[0] / 2) projection = {'projection': 'stere', 'lon_0': dom_x[-1, midx], @@ -136,7 +126,7 @@ def _fractions_map(data, dom_x, dom_y, title, case_id, plot_dir): x, y = m(dom_x, dom_y) # compute map 
proj coordinates. cs = m.pcolormesh(x, y, data, cmap=cmap) - m.colorbar(cs, location='right', pad="5%") + m.colorbar(cs, location='right', pad='5%') plt.title(title) fig.savefig(pfname) # ---------------------------------------------------------------- # @@ -145,16 +135,24 @@ def _fractions_map(data, dom_x, dom_y, title, case_id, plot_dir): # -------------------------------------------------------------------- # -def _fractions_dummy(data, dom_x, dom_y, title, case_id, plot_dir): - """ - Pass on plotting - """ - pfname = 'None <-- could not import matplotlib' - pass - # ---------------------------------------------------------------- # +def _make_filename(title, case_id, plot_dir): + today = date.today().strftime('%Y%m%d') + file_name = '{0}_{1}_{2}.png'.format(title.lower().replace(' ', '_'), + case_id.lower().replace(' ', '_'), + today) + pfname = os.path.join(plot_dir, file_name) return pfname # -------------------------------------------------------------------- # + +# -------------------------------------------------------------------- # +def _fractions_dummy(*args): + ''' + Pass on plotting + ''' + return 'None <-- could not import matplotlib' +# -------------------------------------------------------------------- # + # -------------------------------------------------------------------- # # Set function aliases if matplotlib_available and basemap_available: diff --git a/rvic/core/pycompat.py b/rvic/core/pycompat.py new file mode 100644 index 0000000..f1e1b50 --- /dev/null +++ b/rvic/core/pycompat.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +'''module to support compatability between Python 2 and Python 3 and to handle +default and optional packages''' + +import sys + +py3 = sys.version_info[0] >= 3 + +if py3: + basestring = str + unicode_type = str + bytes_type = bytes + + def iteritems(d): + return iter(d.items()) + + def itervalues(d): + return iter(d.values()) + pyrange = range + pyzip = zip + from configparser import SafeConfigParser +else: + # Python 2 + basestring = basestring + unicode_type = unicode + bytes_type = str + + def iteritems(d): + return d.iteritems() + + def itervalues(d): + return d.itervalues() + pyrange = xrange + from itertools import izip as pyzip + from ConfigParser import SafeConfigParser +try: + from cyordereddict import OrderedDict +except ImportError: + try: + from collections import OrderedDict + except ImportError: + from ordereddict import OrderedDict diff --git a/rvic/core/read_forcing.py b/rvic/core/read_forcing.py index 518c794..3be9b19 100644 --- a/rvic/core/read_forcing.py +++ b/rvic/core/read_forcing.py @@ -1,6 +1,7 @@ -""" +# -*- coding: utf-8 -*- +''' read_forcings.py -""" +''' import os import numpy as np @@ -8,9 +9,10 @@ from bisect import bisect_right from netCDF4 import Dataset, date2num, date2index, num2date from logging import getLogger -from log import LOG_NAME -from time_utility import ord_to_datetime -from share import MMPERMETER, CMPERMETER, WATERDENSITY, TIMEUNITS, SECSPERDAY +from .log import LOG_NAME +from .time_utility import ord_to_datetime +from .share import MMPERMETER, CMPERMETER, WATERDENSITY, TIMEUNITS, SECSPERDAY +from .pycompat import pyrange # -------------------------------------------------------------------- # # create logger @@ -21,7 +23,7 @@ # -------------------------------------------------------------------- # # Data Model class DataModel(object): - """ RVIC Forcing Data Model Class""" + '''RVIC Forcing Data Model Class''' # ---------------------------------------------------------------- # # Initialize @@ 
-44,12 +46,12 @@ def __init__(self, path, file_str, time_fld, lat_fld, liq_flds, self.fld_mult = {} if start: - if type(start) == float: + if type(start) in [float, int]: start = [int(start)] end = [int(end)] else: - start = map(int, start.split('-')) - end = map(int, end.split('-')) + start = list(map(int, start.split('-'))) + end = list(map(int, end.split('-'))) # single files without year in them if not start: @@ -57,18 +59,23 @@ def __init__(self, path, file_str, time_fld, lat_fld, liq_flds, # yearly files elif len(start) == 1: - for year in xrange(start[0], end[0]+1): + for year in pyrange(start[0], end[0] + 1): self.files.append(os.path.join(self.path, file_str.replace('$YYYY', - "{0:04d}".format(year)))) + '{0:04d}'.format(year)))) # Monthly elif len(start) == 2: year = start[0] month = start[1] while True: - self.files.append(os.path.join(self.path, - file_str.replace('$YYYY', "{0:04d}".format(year)).replace('$MM', "{0:02d}".format(month)))) + self.files.append( + os.path.join( + self.path, + file_str.replace( + '$YYYY', + '{0:04d}'.format(year)).replace( + '$MM', '{0:02d}'.format(month)))) if year == end[0] and month == end[1]: break else: @@ -84,8 +91,16 @@ def __init__(self, path, file_str, time_fld, lat_fld, liq_flds, month = start[1] day = start[2] while True: - self.files.append(os.path.join(self.path, - file_str.replace('$YYYY', "{0:04d}".format(year)).replace('$MM', "{0:02d}".format(month)).replace('$DD', "{0:02d}".format(day)))) + self.files.append( + os.path.join( + self.path, + file_str.replace( + '$YYYY', + '{0:04d}'.format(year)).replace( + '$MM', + '{0:02d}'.format(month)).replace( + '$DD', + '{0:02d}'.format(day)))) if year == end[0] and month == end[1] and day == end[2]: break else: @@ -150,7 +165,7 @@ def __init__(self, path, file_str, time_fld, lat_fld, liq_flds, t1 = date2num(num2date(time_series[1], self.time_units, calendar=self.calendar), TIMEUNITS, calendar=self.calendar) - self.secs_per_step = (t1-t0) * SECSPERDAY + self.secs_per_step = (t1 - t0) * SECSPERDAY else: raise ValueError('Need more than 1 forcing timestep in order to ' 'calculate timestep') @@ -159,7 +174,7 @@ def __init__(self, path, file_str, time_fld, lat_fld, liq_flds, # ---------------------------------------------------------------- # def start(self, timestamp, rout_var): - """ Initialize the first files inputs""" + '''Initialize the first files inputs''' # ------------------------------------------------------------ # # find and open first file self.ordtime = date2num(timestamp, self.time_units, @@ -177,9 +192,9 @@ def start(self, timestamp, rout_var): self.current_fhdl = Dataset(self.current_file, 'r') try: - self.current_tind = date2index(timestamp, - self.current_fhdl.variables[self.time_fld]) - except Exception, e: + self.current_tind = date2index( + timestamp, self.current_fhdl.variables[self.time_fld]) + except Exception as e: log.error(num2date(self.start_dates, self.time_units, calendar=self.calendar)) log.error('timestamp: %s', timestamp) @@ -190,6 +205,7 @@ def start(self, timestamp, rout_var): # ------------------------------------------------------------ # # find multiplier for units all fields will be in liquid flux # (kg m-2 s-1) + # Todo: use udunits to convert units for fld in self.liq_flds: units = self.current_fhdl.variables[fld].units @@ -201,9 +217,10 @@ def start(self, timestamp, rout_var): elif units in ['m', 'M', 'meters', 'Meters']: self.fld_mult[fld] = WATERDENSITY / self.secs_per_step elif units in ['cm', 'CM', 'centimeters', 'Centimeters']: - self.fld_mult[fld] = 
WATERDENSITY / CMPERMETER / self.secs_per_step + self.fld_mult[fld] = (WATERDENSITY / CMPERMETER / + self.secs_per_step) else: - raise ValueError('unknown forcing units') + raise ValueError('unknown forcing units: %s' % units) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -213,7 +230,8 @@ def start(self, timestamp, rout_var): try: fill_val = getattr(self.current_fhdl.variables[fld], '_FillValue') - fmask = np.squeeze(self.current_fhdl.variables[fld][0]) == fill_val + fmask = np.squeeze( + self.current_fhdl.variables[fld][0]) == fill_val if np.any(fmask[rout_var.source_y_ind, rout_var.source_x_ind]): log.error('There are fill values in the routing domain') @@ -229,11 +247,12 @@ def start(self, timestamp, rout_var): # ---------------------------------------------------------------- # def read(self, timestamp): - """ Read the current timestamp from the data stream """ + '''Read the current timestamp from the data stream ''' # ------------------------------------------------------------ # # Get the current data index location - if self.current_tind == len(self.current_fhdl.variables[self.time_fld]): + if self.current_tind == len( + self.current_fhdl.variables[self.time_fld]): # close file and open next self.current_fhdl.close() self.current_filenum += 1 @@ -244,18 +263,20 @@ def read(self, timestamp): # ------------------------------------------------------------ # # check that the timestamp is what was expected - expected_timestamp = ord_to_datetime(self.current_fhdl.variables[self.time_fld][self.current_tind], - self.time_units, - calendar=self.calendar) + expected_timestamp = ord_to_datetime( + self.current_fhdl.variables[self.time_fld][self.current_tind], + self.time_units, + calendar=self.calendar) if timestamp != expected_timestamp: log.warning('Timestamp is not what was expected') log.warning('Got timestamp %s', timestamp) log.warning('Expected timestamp %s', expected_timestamp) # Attempt to find the correct timestamp - self.ordtime = date2num(timestamp, - self.current_fhdl.variables[self.time_fld].units, - calendar=self.calendar) + self.ordtime = date2num( + timestamp, + self.current_fhdl.variables[self.time_fld].units, + calendar=self.calendar) # check to make sure the timestamp exists in input dataset if self.ordtime > max(self.end_ords): @@ -269,27 +290,30 @@ def read(self, timestamp): self.current_file = self.files[new_filenum] self.current_fhdl = Dataset(self.current_file, 'r') - self.current_tind = date2index(timestamp, - self.current_fhdl.variables[self.time_fld]) + self.current_tind = date2index( + timestamp, self.current_fhdl.variables[self.time_fld]) # ------------------------------------------------------------ # # ------------------------------------------------------------ # # Get the liquid fluxes log.info('getting fluxes for %s (%s)', timestamp, self.current_tind) forcing = {} - for i, fld in enumerate(self.liq_flds): - self.current_fhdl.variables[fld].set_auto_maskandscale(False) - if i == 0: - forcing['LIQ'] = self.current_fhdl.variables[fld][self.current_tind, :, :] * self.fld_mult[fld] - else: - forcing['LIQ'] += self.current_fhdl.variables[fld][self.current_tind, :, :] * self.fld_mult[fld] + for flux, fields in (('LIQ', self.liq_flds), ('ICE', self.ice_flds)): + for i, fld in enumerate(fields): + # set auto_maskandscale to False to avoid slowdown from numpy + # masked array + self.current_fhdl.variables[fld].set_auto_maskandscale(False) + temp = 
self.current_fhdl.variables[fld][self.current_tind] + fill_val = getattr(self.current_fhdl.variables[fld], + '_FillValue', None) + # replace fill values with numpy.nan + if fill_val is not None: + temp[temp == fill_val] = np.nan - for i, fld in enumerate(self.ice_flds): - self.current_fhdl.variables[fld].set_auto_maskandscale(False) - if i == 0: - forcing['ICE'] = self.current_fhdl.variables[fld][self.current_tind, :, :] * self.fld_mult[fld] - else: - forcing['ICE'] += self.current_fhdl.variables[fld][self.current_tind, :, :] * self.fld_mult[fld] + if i == 0: + forcing[flux] = self.fld_mult[fld] * temp + else: + forcing[flux] += self.fld_mult[fld] * temp # move forward one step self.current_tind += 1 diff --git a/rvic/core/remap.py b/rvic/core/remap.py index 3f0aac8..1b9aec1 100644 --- a/rvic/core/remap.py +++ b/rvic/core/remap.py @@ -1,12 +1,13 @@ -""" +# -*- coding: utf-8 -*- +''' remap.py -""" +''' import os from cdo import Cdo cdo = Cdo() from logging import getLogger -from log import LOG_NAME +from .log import LOG_NAME # -------------------------------------------------------------------- # # create logger @@ -18,7 +19,7 @@ # Remap a file using CDO def remap(grid_file, in_file, out_file, operator='remapcon', remap_options=None): - """Remap infile using cdo""" + '''Remap infile using cdo''' log.info('Remapping %s to %s', in_file, out_file) diff --git a/rvic/core/share.py b/rvic/core/share.py index b735789..a5378a4 100644 --- a/rvic/core/share.py +++ b/rvic/core/share.py @@ -1,11 +1,12 @@ -""" +# -*- coding: utf-8 -*- +''' share.py -""" +''' import sys import socket import string import time as time_mod -from collections import OrderedDict +from .pycompat import OrderedDict, iteritems from netCDF4 import default_fillvals from getpass import getuser @@ -22,8 +23,8 @@ # time # reference time REFERENCE_STRING = '0001-1-1 0:0:0' -REFERENCE_DATE = 10101 # i.e. REFERENCE_STRING -REFERENCE_TIME = 0 # i.e. REFERENCE_STRING +REFERENCE_DATE = 10101 # i.e. REFERENCE_STRING +REFERENCE_TIME = 0 # i.e. REFERENCE_STRING TIMEUNITS = 'days since ' + REFERENCE_STRING # do not change (MUST BE DAYS)! TIMESTAMPFORM = '%Y-%m-%d-%H' CALENDAR = 'noleap' @@ -65,11 +66,11 @@ 5: ['360_day'], 6: ['julian']} -VALID_CHARS = "-_. %s%s" % (string.ascii_letters, string.digits) +VALID_CHARS = '-_. 
%s%s' % (string.ascii_letters, string.digits)   # ----------------------- NETCDF VARIABLES --------------------------------- #  -class NcGlobals: +class NcGlobals(object):     def __init__(self,                  title=None,                  casename=None, @@ -91,252 +92,254 @@ def __init__(self,          self.atts = OrderedDict()  -        if title: +        if title is not None:             self.atts['title'] = title  -        if comment: +        if comment is not None:             self.atts['comment'] = comment  -        if Conventions: +        if Conventions is not None:             self.atts['Conventions'] = Conventions  -        if history: +        if history is not None:             self.atts['history'] = history  -        if source: +        if source is not None:             self.atts['source'] = source  -        if institution: +        if institution is not None:             self.atts['institution'] = institution  -        if hostname: +        if hostname is not None:             self.atts['hostname'] = hostname         else:             self.atts['hostname'] = socket.gethostname()  -        if username: +        if username is not None:             self.atts['username'] = username         else:             self.atts['username'] = getuser()  -        if casename: +        if casename is not None:             self.atts['casename'] = casename  -        if references: +        if casestr is not None: +            self.atts['casestr'] = casestr + +        if references is not None:             self.atts['references'] = references  -        if version: +        if version is not None:             self.atts['version'] = version         else:             try:                 from rvic import version                  self.atts['version'] = version.short_version -            except: +            except ImportError:                 self.atts['version'] = 'unknown'  -        if RvicPourPointsFile: +        if RvicPourPointsFile is not None:             self.atts['RvicPourPointsFile'] = RvicPourPointsFile  -        if RvicUHFile: +        if RvicUHFile is not None:             self.atts['RvicUHFile'] = RvicUHFile  -        if RvicFdrFile: +        if RvicFdrFile is not None:             self.atts['RvicFdrFile'] = RvicFdrFile  -        if RvicDomainFile: +        if RvicDomainFile is not None:             self.atts['RvicDomainFile'] = RvicDomainFile      def update(self): -        self.atts['history'] = 'Created: {0}'.format(time_mod.ctime(time_mod.time())) - +        self.atts['history'] = \ +            'Created: {0}'.format(time_mod.ctime(time_mod.time()))  -class NcVar: -    def __init__(self, **kwargs): -        for key, val in kwargs.iteritems(): -            setattr(self, key, val)   # Coordinate Variables -time = NcVar(long_name='time', -             units=TIMEUNITS) +time = dict(long_name='time', +            units=TIMEUNITS)  -time_bnds = NcVar() +time_bnds = dict()  -timesteps = NcVar(long_name='Series of timesteps', -                  units='unitless') +timesteps = dict(long_name='Series of timesteps', +                 units='unitless')  -lon = NcVar(long_name='longitude', -            units='degrees_east') +lon = dict(long_name='longitude', +           units='degrees_east')  -lat = NcVar(long_name='latitude', -            units='degrees_north') +lat = dict(long_name='latitude', +           units='degrees_north')  -xc = NcVar(long_name='longitude', -           units='degrees_east') +xc = dict(long_name='longitude', +          units='degrees_east')  -yc = NcVar(long_name='latitude', -           units='degrees_north') +yc = dict(long_name='latitude', +          units='degrees_north')   # Data Variables -fraction = NcVar(long_name='fraction of grid cell that is active', -                 units='unitless') +fraction = dict(long_name='fraction of grid cell that is active', +                units='unitless')  -unit_hydrograph = NcVar(long_name='Unit Hydrograph', -                        units='unitless') +unit_hydrograph = dict(long_name='Unit Hydrograph', +                       units='unitless')  -avg_velocity = NcVar(long_name='Flow Velocity Parameter', -                     units='m s-1') +avg_velocity = dict(long_name='Flow Velocity Parameter', +                    units='m s-1')  -avg_diffusion = NcVar(long_name='Diffusion Parameter', -                      units='m2 s-1') +avg_diffusion = dict(long_name='Diffusion Parameter', +                     units='m2 s-1')  -global_basin_id = NcVar(long_name='Global Basin ID from 
RvicFdrFile', -                        units='unitless') +global_basin_id = dict(long_name='Global Basin ID from RvicFdrFile', +                       units='unitless')  -full_time_length = NcVar(long_name='Length of original unit hydrograph', -                         units='timesteps') +full_time_length = dict(long_name='Length of original unit hydrograph', +                        units='timesteps')  -subset_length = NcVar(long_name='Shortened length of the unit hydrograph', -                      units='timesteps') +subset_length = dict(long_name='Shortened length of the unit hydrograph', +                     units='timesteps')  -unit_hydrograph_dt = NcVar(long_name='Unit hydrograph timestep', -                           units='seconds') +unit_hydrograph_dt = dict(long_name='Unit hydrograph timestep', +                          units='seconds')  -outlet_x_ind = NcVar(long_name='x grid coordinate of outlet grid cell', -                     units='unitless') +outlet_x_ind = dict(long_name='x grid coordinate of outlet grid cell', +                    units='unitless')  -outlet_y_ind = NcVar(long_name='y grid coordinate of outlet grid cell', -                     units='unitless') +outlet_y_ind = dict(long_name='y grid coordinate of outlet grid cell', +                    units='unitless')  -outlet_lon = NcVar(long_name='Longitude coordinate of outlet grid cell', -                   units='degrees_east') +outlet_lon = dict(long_name='Longitude coordinate of outlet grid cell', +                  units='degrees_east')  -outlet_lat = NcVar(long_name='Latitude coordinate of outlet grid cell', -                   units='degrees_north') +outlet_lat = dict(long_name='Latitude coordinate of outlet grid cell', +                  units='degrees_north')  -outlet_decomp_ind = NcVar(long_name='1d grid location of outlet grid cell', -                          units='unitless') +outlet_decomp_ind = dict(long_name='1d grid location of outlet grid cell', +                         units='unitless')  -outlet_number = NcVar(long_name='outlet number', -                      units='unitless') +outlet_number = dict(long_name='outlet number', +                     units='unitless')  -outlet_mask = NcVar(long_name='type of outlet point', -                    units='0-ocean, 1-land, 2-guage, 3-none') +outlet_mask = dict(long_name='type of outlet point', +                   units='0-ocean, 1-land, 2-gauge, 3-none')  -outlet_name = NcVar(long_name='Outlet guage name', -                    units='unitless') +outlet_name = dict(long_name='Outlet gauge name', +                   units='unitless')  -outlet_upstream_area = NcVar(long_name='Upstream catchment area contributing ' -                             'to outlet', -                             units='m2') +outlet_upstream_area = dict(long_name='Upstream catchment area contributing ' +                            'to outlet', +                            units='m2')  -outlet_upstream_gridcells = NcVar(long_name='Number of upstream grid cells ' -                                  'contributing to outlet', -                                  units='number of grid cells') +outlet_upstream_gridcells = dict(long_name='Number of upstream grid cells ' +                                 'contributing to outlet', +                                 units='number of grid cells')  -source_x_ind = NcVar(long_name='x grid coordinate of source grid cell', -                     units='unitless') +source_x_ind = dict(long_name='x grid coordinate of source grid cell', +                    units='unitless')  -source_y_ind = NcVar(long_name='y grid coordinate of source grid cell', -                     units='unitless') +source_y_ind = dict(long_name='y grid coordinate of source grid cell', +                    units='unitless')  -source_lon = NcVar(long_name='Longitude coordinate of source grid cell', -                   units='degrees_east') +source_lon = dict(long_name='Longitude coordinate of source grid cell', +                  units='degrees_east')  -source_lat = NcVar(long_name='Latitude coordinate of source grid cell', -                   units='degrees_north') +source_lat = dict(long_name='Latitude coordinate of source grid cell', +                  units='degrees_north')  -source_decomp_ind = NcVar(long_name='1d grid location of source grid cell', -                          units='unitless') -source_time_offset = NcVar(long_name='Number of leading timesteps ommited', -                           units='timesteps') 
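# A hedged sketch of how these plain attribute dicts are consumed once NcVar
# is gone: the writers loop over the dict and setattr() each non-empty value
# onto a netCDF4 variable (see the iteritems() loops in the history.py hunks
# above). The output file name here is illustrative only.
from netCDF4 import Dataset

streamflow_atts = dict(long_name='Streamflow at outlet grid cell',
                       units='kg m-2 s-1')

f = Dataset('example.rvic.h0a.nc', 'w')
f.createDimension('time', None)
var = f.createVariable('streamflow', 'f8', ('time',))
for key, val in streamflow_atts.items():
    if val:
        setattr(var, key, val)   # attaches long_name and units attributes
f.close()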
+source_decomp_ind = dict(long_name='1d grid location of source grid cell', +                         units='unitless') +source_time_offset = dict(long_name='Number of leading timesteps omitted', +                          units='timesteps')  -source2outlet_ind = NcVar(long_name='source to outlet index mapping', -                          units='unitless') +source2outlet_ind = dict(long_name='source to outlet index mapping', +                         units='unitless')  -ring = NcVar(long_name='Convolution Ring', -             units='kg m-2 s-1') +ring = dict(long_name='Convolution Ring', +            units='kg m-2 s-1')  -streamflow = NcVar(long_name='Streamflow at outlet grid cell', -                   units='kg m-2 s-1') +streamflow = dict(long_name='Streamflow at outlet grid cell', +                  units='kg m-2 s-1')  -storage = NcVar(long_name='Mass storage in stream upstream of outlet grid cell', -                units='kg m-2 s-1') +storage = dict(long_name='Mass storage in stream upstream of outlet ' +               'grid cell', +               units='kg m-2 s-1')  -# valid values http://cf-pcmdi.llnl.gov/documents/cf-conventions/1.6/cf-conventions.html#calendar -timemgr_rst_type = NcVar(long_name='calendar type', -                         units='unitless', -                         flag_values='0, 1, 2, 3, 4, 5, 6', -                         flag_meanings='NONE, NO_LEAP_C, GREGORIAN, ' -                                       'PROLEPTIC_GREGORIAN, ALL_LEAP, ' -                                       '360_DAY, JULIAN') +# valid values http://cf-pcmdi.llnl.gov/documents/cf-conventions/1.6/\ +# cf-conventions.html#calendar +timemgr_rst_type = dict(long_name='calendar type', +                        units='unitless', +                        flag_values='0, 1, 2, 3, 4, 5, 6', +                        flag_meanings='NONE, NO_LEAP_C, GREGORIAN, ' +                                      'PROLEPTIC_GREGORIAN, ALL_LEAP, ' +                                      '360_DAY, JULIAN')  -timemgr_rst_step_sec = NcVar(long_name='seconds component of timestep size', +timemgr_rst_step_sec = dict(long_name='seconds component of timestep size', +                            units='sec', +                            valid_range='0, 86400') + +timemgr_rst_start_ymd = dict(long_name='start date', +                             units='YYYYMMDD') + +timemgr_rst_start_tod = dict(long_name='start time of day',                              units='sec',                              valid_range='0, 86400')  -timemgr_rst_start_ymd = NcVar(long_name='start date', -                              units='YYYYMMDD') +timemgr_rst_ref_ymd = dict(long_name='reference date', +                           units='YYYYMMDD')  -timemgr_rst_start_tod = NcVar(long_name='start time of day', -                              units='sec', -                              valid_range='0, 86400') +timemgr_rst_ref_tod = dict(long_name='reference time of day', +                           units='sec', +                           valid_range='0, 86400')  -timemgr_rst_ref_ymd = NcVar(long_name='reference date',                              units='YYYYMMDD') +timemgr_rst_curr_ymd = dict(long_name='current date',                             units='YYYYMMDD')  -timemgr_rst_ref_tod = NcVar(long_name='reference time of day',                              units='sec',                              valid_range='0, 86400') +timemgr_rst_curr_tod = dict(long_name='current time of day',                             units='sec',                             valid_range='0, 86400')  -timemgr_rst_curr_ymd = NcVar(long_name='current date', -                             units='YYYYMMDD') - -timemgr_rst_curr_tod = NcVar(long_name='current time of day', -                             units='sec', -                             valid_range='0, 86400') - -nhtfrq = NcVar(long_name='Frequency of history writes', -               units='absolute value of negative is in hours, 0=monthly, positive is time-steps', -               comment='Namelist item') - -mfilt = NcVar(long_name='Number of history time samples on a file', -              units='initless', +nhtfrq = dict(long_name='Frequency of history writes', +              units='absolute value of negative is in hours, 0=monthly, ' +                    'positive is time-steps',               comment='Namelist item')  -ncprec = NcVar(long_name='Flag for data precision', -               flag_values='1, 2', -               flag_meanings='single-precision double-precision', -               comment='Namelist item', -               valid_range='1, 2') +mfilt = dict(long_name='Number of history time samples on a file', +             units='unitless', +             comment='Namelist item')  -fincl = NcVar(long_name='Fieldnames to include', -              comment='Namelist item') +ncprec = 
dict(long_name='Flag for data precision', + flag_values='1, 2', + flag_meanings='single-precision double-precision', + comment='Namelist item', + valid_range='1, 2') -fexcl = NcVar(long_name='Fieldnames to exclude', - comment='Namelist item') +fincl = dict(long_name='Fieldnames to include', + comment='Namelist item') + +fexcl = dict(long_name='Fieldnames to exclude', + comment='Namelist item') -nflds = NcVar(long_name='Number of fields on file', - units='unitless') +nflds = dict(long_name='Number of fields on file', + units='unitless') -ntimes = NcVar(long_name='Number of time steps on file', - units='time-step') -is_endhist = NcVar(long_name='End of history file', - flag_values='0, 1', - flag_meanings='FALSE TRUE', - comment='Namelist item', - valid_range='0, 1') +ntimes = dict(long_name='Number of time steps on file', + units='time-step') +is_endhist = dict(long_name='End of history file', + flag_values='0, 1', + flag_meanings='FALSE TRUE', + comment='Namelist item', + valid_range='0, 1') -begtime = NcVar(long_name='Beginning time', - units='time units') +begtime = dict(long_name='Beginning time', + units='time units') -hpindex = NcVar(long_name='History pointer index', - units='units') +hpindex = dict(long_name='History pointer index', + units='units') -avgflag = NcVar(long_name='Averaging flag', - units='"A=Average, X=Maximum, M=Minimum, I=Instantaneous') +avgflag = dict(long_name='Averaging flag', + units='A=Average, X=Maximum, M=Minimum, I=Instantaneous') -name = NcVar(long_name='Fieldnames') +name = dict(long_name='Fieldnames') -long_name = NcVar(long_name='Long descriptive names for fields') +long_name = dict(long_name='Long descriptive names for fields') -units = NcVar(long_name='Units for each history field output') +units = dict(long_name='Units for each history field output') diff --git a/rvic/core/time_utility.py b/rvic/core/time_utility.py index d83015e..a68c9af 100644 --- a/rvic/core/time_utility.py +++ b/rvic/core/time_utility.py @@ -1,17 +1,19 @@ -""" +# -*- coding: utf-8 -*- +''' time_utility.py time units conventions: - Timesteps are in seconds (unit_hydrograph_dt) - Time ordinal is in days (time_ord) -""" +''' import numpy as np from netCDF4 import num2date, date2num from datetime import datetime from dateutil.relativedelta import relativedelta -from share import TIMEUNITS, SECSPERDAY, MINSPERDAY, HOURSPERDAY, TIMESTAMPFORM +from .share import TIMEUNITS, SECSPERDAY, MINSPERDAY, HOURSPERDAY, \ + TIMESTAMPFORM from logging import getLogger -from log import LOG_NAME +from .log import LOG_NAME # -------------------------------------------------------------------- # # create logger @@ -22,7 +24,7 @@ # -------------------------------------------------------------------- # # RVIC Time Class class Dtime(object): - """ A Time Module for handling flags and timesteps """ + '''A Time Class for handling flags and timesteps ''' # ---------------------------------------------------------------- # # Setup Dtim object @@ -42,7 +44,7 @@ def __init__(self, start_date, stop_option, stop_n, stop_date, self.stop_option = stop_option self.stop_n = stop_n if self.stop_option == 'date': - date = map(int, stop_date.split('-')) + date = list(map(int, stop_date.split('-'))) self.stop_date = datetime(*date) else: self.stop_date = False @@ -51,7 +53,7 @@ def __init__(self, start_date, stop_option, stop_n, stop_date, self.rest_option = rest_option self.rest_n = rest_n if self.rest_option == 'date': - date = map(int, rest_date.split('-')) + date = list(map(int, rest_date.split('-'))) self.rest_date = 
datetime(*date) else: self.rest_date = False @@ -115,7 +117,8 @@ def __stop(self): elif self.stop_option == 'nmonths': temp = ord_to_datetime(self.time_ord + self.dt, TIMEUNITS, calendar=self.calendar) - if relativedelta(temp, self.start_date).months % self.stop_n == 0.0: + if relativedelta(temp, + self.start_date).months % self.stop_n == 0.0: flag = True elif self.stop_option == 'nmonth': temp = ord_to_datetime(self.time_ord + self.dt, TIMEUNITS, @@ -178,7 +181,8 @@ def __rest(self): elif self.rest_option == 'nmonths': temp = ord_to_datetime(self.time_ord + self.dt, TIMEUNITS, calendar=self.calendar) - if relativedelta(temp, self.start_date).months % self.rest_n == 0.0: + if relativedelta(temp, + self.start_date).months % self.rest_n == 0.0: flag = True elif self.rest_option == 'nmonth': temp = ord_to_datetime(self.time_ord + self.dt, TIMEUNITS, @@ -211,10 +215,10 @@ def __rest(self): # -------------------------------------------------------------------- # def ord_to_datetime(time, units, calendar='standard'): - """ + ''' netCDF4.num2date yields a fake datetime object, this function converts converts that back to a real datetime object - """ + ''' # this is the netCDF4.datetime object # if the calendar is standard, t is already a real datetime object if type(time) == np.ndarray: diff --git a/rvic/core/utilities.py b/rvic/core/utilities.py index 17d0f1c..4cc2c92 100644 --- a/rvic/core/utilities.py +++ b/rvic/core/utilities.py @@ -1,18 +1,20 @@ -""" +# -*- coding: utf-8 -*- +''' utilities.py -""" +''' import os import tarfile from scipy.spatial import cKDTree import numpy as np from shutil import rmtree, copyfile -from ConfigParser import SafeConfigParser +from .pycompat import iteritems, SafeConfigParser from netCDF4 import Dataset from logging import getLogger -from log import LOG_NAME -from share import TIMESTAMPFORM, RPOINTER, EARTHRADIUS, METERSPERMILE -from share import METERS2PERACRE, METERSPERKM, VALID_CHARS -from config import read_config +from .log import LOG_NAME +from .share import TIMESTAMPFORM, RPOINTER, EARTHRADIUS, METERSPERMILE +from .share import METERS2PERACRE, METERSPERKM, VALID_CHARS +from .config import read_config +from .pycompat import pyzip # -------------------------------------------------------------------- # # create logger @@ -23,7 +25,7 @@ # -------------------------------------------------------------------- # # find x y coordinates def latlon2yx(plats, plons, glats, glons): - """find y x coordinates """ + '''find y x coordinates ''' # use astronomical conventions for longitude # (i.e. 
negative longitudes to the east of 0) @@ -43,7 +45,7 @@ def latlon2yx(plats, plons, glats, glons): points = list(np.vstack((np.array(plats), np.array(plons))).transpose()) mytree = cKDTree(combined) - dist, indexes = mytree.query(points, k=1) + indexes = mytree.query(points, k=1)[1] y, x = np.unravel_index(indexes, glons.shape) return y, x @@ -52,33 +54,41 @@ def latlon2yx(plats, plons, glats, glons): # -------------------------------------------------------------------- # # Search neighboring grid cells for channel -def search_for_channel(source_area, routys, routxs, search=2, tol=10): - """Search neighboring grid cells for channel""" +def search_for_channel(source_area, routys, routxs, search=1, tol=10): + '''Search neighboring grid cells for channel''' - log.debug('serching for channel') + log.debug('searching for channel, tol: %f, search: %i', tol, search) - new_ys = np.empty_like(routys) - new_xs = np.empty_like(routxs) + new_ys = np.copy(routys) + new_xs = np.copy(routxs) - for i, (y, x) in enumerate(zip(routys, routxs)): + ysize, xsize = source_area.shape + + for i, (y, x) in enumerate(pyzip(routys, routxs)): area0 = source_area[y, x] - search_area = source_area[y-search:y+search+1, x-search:x+search+1] + for j in range(search + 1): + ymin = np.clip(y - j, 0, ysize) + ymax = np.clip(y + j + 1, 0, ysize) + xmin = np.clip(x - j, 0, xsize) + xmax = np.clip(x + j + 1, 0, xsize) + + search_area = source_area[ymin:ymax, xmin:xmax] + + if np.any(search_area / area0 > tol): + sy, sx = np.unravel_index(search_area.argmax(), + search_area.shape) + + new_ys[i] = np.clip(y + sy - j, 0, ysize) + new_xs[i] = np.clip(x + sx - j, 0, xsize) - if np.any(search_area > area0*tol): - sy, sx = np.unravel_index(search_area.argmax(), search_area.shape) + log.debug('Moving pour point to channel y: %s->%s, x: %s->%s', + y, new_ys[i], x, new_xs[i]) + log.debug('Source Area has increased from %s to %s', + area0, source_area[new_ys[i], new_xs[i]]) - new_ys[i] = y + sy - search - new_xs[i] = x + sx - search + break - log.debug('Moving pour point to channel y: ' - '{0}->{1}, x: {2}->{3}'.format(y, new_ys[i], - x, new_xs[i])) - log.debug('Source Area has increased from {0}' - ' to {1}'.format(area0, source_area[new_ys[i], new_xs[i]])) - else: - new_ys[i] = y - new_xs[i] = x return new_ys, new_xs # -------------------------------------------------------------------- # @@ -87,7 +97,7 @@ def search_for_channel(source_area, routys, routxs, search=2, tol=10): # -------------------------------------------------------------------- # # Write rpointer file def write_rpointer(restart_dir, restart_file, timestamp): - """ Write a configuration file with restart file and time """ + ''' Write a configuration file with restart file and time ''' rpointer_file = os.path.join(restart_dir, RPOINTER) config = SafeConfigParser() @@ -108,17 +118,17 @@ def write_rpointer(restart_dir, restart_file, timestamp): # -------------------------------------------------------------------- # # A helper function to read a netcdf file def read_netcdf(nc_file, variables=None, coords=None): - """ + ''' Read data from input netCDF. Will read all variables if none provided. Will also return all variable attributes.
Both variables (data and attributes) are returned as dictionaries named by variable - """ + ''' f = Dataset(nc_file, 'r') if not variables: - variables = f.variables.keys() + variables = list(f.variables.keys()) if not coords: coords = slice(None) @@ -145,10 +155,10 @@ def read_netcdf(nc_file, variables=None, coords=None): # -------------------------------------------------------------------- # # Check to make sure all the expected variables are present in the dictionary def check_ncvars(config_section, nckeys): - """ + ''' Make sure the variables listed in the config file are present in the netcdf - """ - for key, value in config_section.iteritems(): + ''' + for key, value in iteritems(config_section): if key.endswith('var'): if value not in nckeys: log.error('%s (%s) not in %s', value, key, @@ -162,22 +172,22 @@ def check_ncvars(config_section, nckeys): # -------------------------------------------------------------------- # # Find the index of the the nearest value def find_nearest(array, value): - """ Find the index location in (array) with value nearest to (value)""" - return np.abs(array-value).argmin() + ''' Find the index location in (array) with value nearest to (value)''' + return np.abs(array - value).argmin() # -------------------------------------------------------------------- # # -------------------------------------------------------------------- # # Delete all the files in a directory def clean_dir(directory): - """ Clean all files in a directory""" + ''' Clean all files in a directory''' for file_name in os.listdir(directory): file_path = os.path.join(directory, file_name) try: if os.path.isfile(file_path): os.unlink(file_path) - except: - log.exception('Error cleaning file: %s' % file_path) + except Exception: + log.exception('Error cleaning file: %s', file_path) return # -------------------------------------------------------------------- # @@ -185,12 +195,12 @@ def clean_dir(directory): # -------------------------------------------------------------------- # # Delete a particular file def clean_file(file_name): - """ Delete the file""" + ''' Delete the file''' try: if os.path.isfile(file_name): os.unlink(file_name) - except: - log.exception('Error cleaning file: %s' % file_name) + except Exception: + log.exception('Error cleaning file: %s', file_name) return # -------------------------------------------------------------------- # @@ -198,7 +208,7 @@ def clean_file(file_name): # -------------------------------------------------------------------- # # Make a set of directories def make_directories(rundir, subdir_names): - """Make rvic directory structure""" + '''Make rvic directory structure''' if not os.path.exists(rundir): os.makedirs(rundir) @@ -213,7 +223,7 @@ def make_directories(rundir, subdir_names): # -------------------------------------------------------------------- # # Move all the input files to a central location -def copy_inputs(config_file, InputsDir): +def copy_inputs(config_file, inputs_dir): config_dict = read_config(config_file) @@ -221,20 +231,20 @@ def copy_inputs(config_file, InputsDir): config.optionxform = str config.read(config_file) - new_config = os.path.join(InputsDir, os.path.split(config_file)[1]) + new_config = os.path.join(inputs_dir, os.path.split(config_file)[1]) # ---------------------------------------------------------------- # # copy the inputs - for key, section in config_dict.iteritems(): - if 'FILE_NAME' in section.keys(): - new_file_name = os.path.join(InputsDir, - os.path.split(section['FILE_NAME'])[1]) + for key, section in 
iteritems(config_dict): + if 'FILE_NAME' in list(section.keys()): + new_file_name = os.path.join( + inputs_dir, os.path.split(section['FILE_NAME'])[1]) copyfile(section['FILE_NAME'], new_file_name) # update the config file for an easy restart config.set(key, 'FILE_NAME', - os.path.join(InputsDir, + os.path.join(inputs_dir, os.path.split(section['FILE_NAME'])[1])) # update the config_dict with the new value @@ -253,7 +263,7 @@ def copy_inputs(config_file, inputs_dir): # -------------------------------------------------------------------- # def tar_inputs(inputs, suffix='', tar_type='tar'): - """ Tar the inputss directory or file at the end of a run""" + ''' Tar the inputs directory or file at the end of a run''' # ---------------------------------------------------------------- # # Make the TarFile if tar_type == 'tar': @@ -299,13 +309,13 @@ def tar_inputs(inputs, suffix='', tar_type='tar'): # -------------------------------------------------------------------- # # Read the domain def read_domain(domain_dict, lat0_is_min=False): - """ + ''' Read the domain file and return all the variables and attributes. Area is returned in m2 - """ + ''' dom_data, dom_vatts, dom_gatts = read_netcdf(domain_dict['FILE_NAME']) - check_ncvars(domain_dict, dom_data.keys()) + check_ncvars(domain_dict, list(dom_data.keys())) # ---------------------------------------------------------------- # # Create the cell_ids variable @@ -328,7 +338,7 @@ def read_domain(domain_dict, lat0_is_min=False): if (dom_data[dom_lat][-1] > dom_data[dom_lat][0]) != lat0_is_min: log.debug('Domain Inputs came in upside down, flipping everything ' 'now.') - var_list = dom_data.keys() + var_list = list(dom_data.keys()) var_list.remove(dom_lon) for var in var_list: dom_data[var] = np.flipud(dom_data[var]) @@ -346,24 +356,24 @@ dom_area = domain_dict['AREA_VAR'] area_units = dom_vatts[dom_area]['units'] - if area_units in ["rad2", "radians2", "radian2", "radian^2", "rad^2", - "radians^2", "rads^2", "radians squared", - "square-radians"]: - dom_data[dom_area] = dom_data[dom_area]*EARTHRADIUS*EARTHRADIUS - elif area_units in ["m2", "m^2", "meters^2", "meters2", "square-meters", - "meters squared"]: + if area_units in ['rad2', 'radians2', 'radian2', 'radian^2', 'rad^2', + 'radians^2', 'rads^2', 'radians squared', + 'square-radians']: + dom_data[dom_area] = dom_data[dom_area] * EARTHRADIUS * EARTHRADIUS + elif area_units in ['m2', 'm^2', 'meters^2', 'meters2', 'square-meters', + 'meters squared']: dom_data[dom_area] = dom_data[dom_area] - elif area_units in ["km2", "km^2", "kilometers^2", "kilometers2", - "square-kilometers", "kilometers squared"]: - dom_data[dom_area] = dom_data[dom_area]*METERSPERKM*METERSPERKM - elif area_units in ["mi2", "mi^2", "miles^2", "miles", "square-miles", - "miles squared"]: - dom_data[dom_area] = dom_data[dom_area]*METERSPERMILE*METERSPERMILE - elif area_units in ["acres", "ac", "ac."]: - dom_data[dom_area] = dom_data[dom_area]*METERS2PERACRE + elif area_units in ['km2', 'km^2', 'kilometers^2', 'kilometers2', + 'square-kilometers', 'kilometers squared']: + dom_data[dom_area] = dom_data[dom_area] * METERSPERKM * METERSPERKM + elif area_units in ['mi2', 'mi^2', 'miles^2', 'miles', 'square-miles', + 'miles squared']: + dom_data[dom_area] = dom_data[dom_area] * METERSPERMILE * METERSPERMILE + elif area_units in ['acres', 'ac', 'ac.']: + dom_data[dom_area] = dom_data[dom_area] * METERS2PERACRE else: - log.warning("WARNING: UNKNOWN AREA units (%s), ASSUMING THEY ARE IN "
- "SQUARE METERS", + log.warning('WARNING: UNKNOWN AREA units (%s), ASSUMING THEY ARE IN ' + 'SQUARE METERS', dom_data[domain_dict['AREA_VAR']]['units']) # ---------------------------------------------------------------- # return dom_data, dom_vatts, dom_gatts diff --git a/rvic/core/variables.py b/rvic/core/variables.py index 0e6d38e..fc960df 100644 --- a/rvic/core/variables.py +++ b/rvic/core/variables.py @@ -1,17 +1,19 @@ -""" +# -*- coding: utf-8 -*- +''' variables.py -""" +''' import os import numpy as np from netCDF4 import Dataset, date2num, stringtochar from logging import getLogger -from log import LOG_NAME -from time_utility import ord_to_datetime -from share import TIMEUNITS, NC_INT, NC_DOUBLE, NC_CHAR -from share import RVIC_TRACERS, NcGlobals, SECSPERDAY, MAX_NC_CHARS -from share import CALENDAR_KEYS, REFERENCE_DATE, REFERENCE_TIME -from convolution_wrapper import rvic_convolve -import share +from .log import LOG_NAME +from .time_utility import ord_to_datetime +from .share import TIMEUNITS, NC_INT, NC_DOUBLE, NC_CHAR +from .share import RVIC_TRACERS, NcGlobals, SECSPERDAY, MAX_NC_CHARS +from .share import CALENDAR_KEYS, REFERENCE_DATE, REFERENCE_TIME +from .convolution_wrapper import rvic_convolve +from .pycompat import iteritems +from . import share # -------------------------------------------------------------------- # # create logger @@ -45,19 +47,19 @@ def __init__(self, lat=-9999.9, lon=-9999.9, domx=-9999, domy=-9999, return def __str__(self): - return ("Point({0}, lat:{1:3.4f}, lon:{2:3.4f}, y:{3:d}, " - "x:{4:d})".format(self.name, self.lat, self.lon, self.domy, + return ('Point({0}, lat:{1:3.4f}, lon:{2:3.4f}, y:{3:d}, ' + 'x:{4:d})'.format(self.name, self.lat, self.lon, self.domy, self.domx)) def __repr__(self): - return (" -- Point -- \n" - "name:\t{0}\n" - "lat:\t{1:3.4f}\n" - "lon:\t{2:3.4f}\n" - "domy:\t{3:d}\n" - "domx:\t{4:d}\n" - "routy:\t{5:d}\n" - "routx:\t{6:d}\n".format(self.name, self.lat, self.lon, + return (' -- Point -- \n' + 'name:\t{0}\n' + 'lat:\t{1:3.4f}\n' + 'lon:\t{2:3.4f}\n' + 'domy:\t{3:d}\n' + 'domx:\t{4:d}\n' + 'routy:\t{5:d}\n' + 'routx:\t{6:d}\n'.format(self.name, self.lat, self.lon, self.domy, self.domx, self.routy, self.routx)) # -------------------------------------------------------------------- # @@ -66,7 +68,7 @@ def __repr__(self): # -------------------------------------------------------------------- # # Rvar Object class Rvar(object): - """ Creates a RVIC structure """ + ''' Creates a RVIC structure ''' # ---------------------------------------------------------------- # # Initialize @@ -76,8 +78,8 @@ def __init__(self, param_file, case_name, calendar, out_dir, file_format, f = Dataset(param_file, 'r') self.n_sources = len(f.dimensions['sources']) self.n_outlets = len(f.dimensions['outlets']) - self.subset_length = int(f.variables['subset_length'][:]) - self.full_time_length = int(f.variables['full_time_length'][:]) + self.subset_length = f.variables['subset_length'][:] + self.full_time_length = f.variables['full_time_length'][:] self.unit_hydrograph_dt = f.variables['unit_hydrograph_dt'][:] self.source_lon = f.variables['source_lon'][:] self.source_lat = f.variables['source_lat'][:] @@ -99,7 +101,8 @@ def __init__(self, param_file, case_name, calendar, out_dir, file_format, except KeyError: log.warning('Could not find unit hydrograph var %s', tname) log.warning('trying var name unit_hydrograph') - self.unit_hydrograph[tracer] = f.variables['unit_hydrograph'][:] + self.unit_hydrograph[tracer] = \ + f.variables['unit_hydrograph'][:] 
except: raise ValueError('Cant find Unit Hydrograph Variable') self.outlet_name = f.variables['outlet_name'][:] @@ -110,7 +113,7 @@ def __init__(self, param_file, case_name, calendar, out_dir, file_format, self.file_format = file_format try: self.outlet_upstream_area = f.variables['outlet_upstream_area'][:] - except: + except KeyError: self.outlet_upstream_area = None self.glob_atts = NcGlobals(title='RVIC restart file', RvicPourPointsFile=f.RvicPourPointsFile, @@ -132,12 +135,13 @@ def __init__(self, param_file, case_name, calendar, out_dir, file_format, # ------------------------------------------------------------ # self._calendar = calendar - self.__fname_format = os.path.join(out_dir, "%s.r.%%Y-%%m-%%d-%%H-%%M-%%S.nc" % (case_name)) + self.__fname_format = os.path.join( + out_dir, '%s.r.%%Y-%%m-%%d-%%H-%%M-%%S.nc' % (case_name)) # ------------------------------------------------------------ # # CESM calendar key (only NO_LEAP_C, GREGORIAN are supported in CESM) self._calendar_key = 0 - for key, cals in CALENDAR_KEYS.iteritems(): + for key, cals in iteritems(CALENDAR_KEYS): if self._calendar in cals: self._calendar_key = key break @@ -155,9 +159,9 @@ def __init__(self, param_file, case_name, calendar, out_dir, file_format, # ---------------------------------------------------------------- # # Check that dom file matches def _check_domain_file(self, domain_file): - """ + ''' Confirm that the dom files match in the parameter and domain files - """ + ''' input_file = os.path.split(domain_file)[1] log.info('domain_file: %s', input_file) log.info('Parameter RvicDomainFile: %s', self.RvicDomainFile) @@ -171,7 +175,7 @@ def _check_domain_file(self, domain_file): # ---------------------------------------------------------------- # def set_domain(self, dom_data, domain, lat0_is_min): - """ Set the domain size """ + ''' Set the domain size ''' self._check_domain_file(domain['FILE_NAME']) self.domain_shape = dom_data[domain['LAND_MASK_VAR']].shape @@ -195,9 +199,9 @@ def set_domain(self, dom_data, domain, lat0_is_min): # ---------------------------------------------------------------- # # Flip the y index order def _flip_y_inds(self): - """ + ''' Flip the y index order - """ + ''' self.source_y_ind = self.ysize - self.source_y_ind - 1 self.outlet_y_ind = self.ysize - self.outlet_y_ind - 1 # ---------------------------------------------------------------- # @@ -211,9 +215,10 @@ def init_state(self, state_file, run_type, timestamp): for tracer in RVIC_TRACERS: self.ring[tracer] = f.variables['{0}_ring'.format(tracer)][:] - file_timestamp = ord_to_datetime(f.variables['time'][:], - f.variables['time'].units, - calendar=f.variables['time'].calendar) + file_timestamp = ord_to_datetime( + f.variables['time'][:], + f.variables['time'].units, + calendar=f.variables['time'].calendar) if run_type == 'restart': self.timestamp = file_timestamp @@ -257,13 +262,13 @@ def init_state(self, state_file, run_type, timestamp): # ---------------------------------------------------------------- # # Convolve def convolve(self, aggrunin, time_ord): - """ + ''' This convoluition funciton works by looping over all points and doing the convolution one timestep at a time. This is accomplished by creating a convolution ring. Contributing flow from each timestep is added to the convolution ring. The convolution ring is saved as the state. The first column of values in the ring are the current runoff. 
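The ring-buffer scheme this docstring describes can be sketched for a single outlet as follows (illustrative only; the real `convolve` below loops over every source and tracer through the compiled `rvic_convolve` kernel):

    import numpy as np

    uh = np.array([0.5, 0.3, 0.2])  # unit hydrograph, sums to 1
    ring = np.zeros_like(uh)        # future flow at one outlet

    def convolve_step(ring, uh, runoff):
        ring += uh * runoff    # spread this step's runoff through time
        flow = ring[0]         # first slot is the current streamflow
        ring[:-1] = ring[1:]   # advance the ring one timestep
        ring[-1] = 0.0
        return flow

    # a unit impulse of runoff returns the unit hydrograph
    flows = [convolve_step(ring, uh, r) for r in [1.0, 0.0, 0.0]]
    assert np.allclose(flows, uh)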
- """ + ''' # ------------------------------------------------------------ # # Check that the time_ord is in sync # This is the time at the start of the current step (end of last step) @@ -301,9 +306,9 @@ def convolve(self, aggrunin, time_ord): self.source_y_ind, self.source_x_ind, self.source_time_offset, - self.unit_hydrograph[tracer][:, :], + self.unit_hydrograph[tracer], aggrunin[tracer], - self.ring[tracer][:, :]) + self.ring[tracer]) # -------------------------------------------------------- # # ------------------------------------------------------------ # @@ -319,22 +324,22 @@ def convolve(self, aggrunin, time_ord): # ---------------------------------------------------------------- # def get_time_mode(self, cpl_secs_per_step): - """ + ''' Determine the relationship between the coupling period and the unit- hydrograph period. In cases where they do not match, the model will aggregate the appropriate quantities before/after the confolution step. - """ + ''' log.info('Coupling Timestep is (seconds): %s', cpl_secs_per_step) log.info('RVIC Timestep is (seconds): %s', self.unit_hydrograph_dt) if (self.unit_hydrograph_dt % cpl_secs_per_step == 0) and \ (self.unit_hydrograph_dt >= cpl_secs_per_step): - self.agg_tsteps = self.unit_hydrograph_dt/cpl_secs_per_step + self.agg_tsteps = self.unit_hydrograph_dt / cpl_secs_per_step else: log.error('unit_hydrograph_dt must be a multiple of the ' 'cpl_secs_per_step') - raise ValueError("Stopped due to error in determining agg_tsteps") + raise ValueError('Stopped due to error in determining agg_tsteps') log.info('RVIC will run 1 time for every %i coupling periods', self.agg_tsteps) @@ -342,7 +347,7 @@ def get_time_mode(self, cpl_secs_per_step): # ---------------------------------------------------------------- # def get_rof(self): - """Extract the current rof""" + '''Extract the current rof''' rof = {} for tracer in RVIC_TRACERS: rof[tracer] = self.ring[tracer][0, :] @@ -351,7 +356,7 @@ def get_rof(self): # ---------------------------------------------------------------- # def get_storage(self): - """Extract the current storage""" + '''Extract the current storage''' storage = {} for tracer in RVIC_TRACERS: storage[tracer] = self.ring[tracer].sum(axis=1) @@ -360,7 +365,7 @@ def get_storage(self): # ---------------------------------------------------------------- # def write_initial(self): - """write initial flux""" + '''write initial flux''' pass # ---------------------------------------------------------------- # @@ -382,7 +387,7 @@ def write_restart(self, current_history_files, history_restart_files): time = f.createVariable('time', NC_DOUBLE, ('time',), **self.ncvaropts) time[:] = date2num(self.timestamp, TIMEUNITS, calendar=self._calendar) - for key, val in share.time.__dict__.iteritems(): + for key, val in iteritems(share.time): if val: setattr(time, key, val) time.calendar = self._calendar @@ -393,7 +398,7 @@ def write_restart(self, current_history_files, history_restart_files): **self.ncvaropts) timesteps[:] = np.arange(self.full_time_length) - for key, val in share.timesteps.__dict__.iteritems(): + for key, val in iteritems(share.timesteps): if val: setattr(timesteps, key, val) timesteps.timestep_length = 'unit_hydrograph_dt' @@ -402,14 +407,14 @@ def write_restart(self, current_history_files, history_restart_files): unit_hydrograph_dt = f.createVariable('unit_hydrograph_dt', NC_DOUBLE, (), **self.ncvaropts) unit_hydrograph_dt[:] = self.unit_hydrograph_dt - for key, val in share.unit_hydrograph_dt.__dict__.iteritems(): + for key, val in 
iteritems(share.unit_hydrograph_dt): if val: setattr(unit_hydrograph_dt, key, val) timemgr_rst_type = f.createVariable('timemgr_rst_type', NC_DOUBLE, (), **self.ncvaropts) timemgr_rst_type[:] = self._calendar_key - for key, val in share.timemgr_rst_type.__dict__.iteritems(): + for key, val in iteritems(share.timemgr_rst_type): if val: setattr(timemgr_rst_type, key, val) @@ -417,16 +422,16 @@ def write_restart(self, current_history_files, history_restart_files): NC_DOUBLE, (), **self.ncvaropts) timemgr_rst_step_sec[:] = self.unit_hydrograph_dt - for key, val in share.timemgr_rst_step_sec.__dict__.iteritems(): + for key, val in iteritems(share.timemgr_rst_step_sec): if val: setattr(timemgr_rst_step_sec, key, val) timemgr_rst_start_ymd = f.createVariable('timemgr_rst_start_ymd', NC_DOUBLE, (), **self.ncvaropts) - timemgr_rst_start_ymd[:] = self._start_date.year*10000 \ - + self._start_date.month*100 + self._start_date.day - for key, val in share.timemgr_rst_start_ymd.__dict__.iteritems(): + timemgr_rst_start_ymd[:] = self._start_date.year * 10000 \ + + self._start_date.month * 100 + self._start_date.day + for key, val in iteritems(share.timemgr_rst_start_ymd): if val: setattr(timemgr_rst_start_ymd, key, val) @@ -434,7 +439,7 @@ def write_restart(self, current_history_files, history_restart_files): NC_DOUBLE, (), **self.ncvaropts) timemgr_rst_start_tod[:] = (self._start_ord % 1) * SECSPERDAY - for key, val in share.timemgr_rst_start_tod.__dict__.iteritems(): + for key, val in iteritems(share.timemgr_rst_start_tod): if val: setattr(timemgr_rst_start_tod, key, val) @@ -442,7 +447,7 @@ def write_restart(self, current_history_files, history_restart_files): NC_DOUBLE, (), **self.ncvaropts) timemgr_rst_ref_ymd[:] = REFERENCE_DATE - for key, val in share.timemgr_rst_ref_ymd.__dict__.iteritems(): + for key, val in iteritems(share.timemgr_rst_ref_ymd): if val: setattr(timemgr_rst_ref_ymd, key, val) @@ -450,55 +455,62 @@ def write_restart(self, current_history_files, history_restart_files): NC_DOUBLE, (), **self.ncvaropts) timemgr_rst_ref_tod[:] = REFERENCE_TIME - for key, val in share.timemgr_rst_ref_tod.__dict__.iteritems(): + for key, val in iteritems(share.timemgr_rst_ref_tod): if val: setattr(timemgr_rst_ref_tod, key, val) timemgr_rst_curr_ymd = f.createVariable('timemgr_rst_curr_ymd', NC_DOUBLE, (), **self.ncvaropts) - timemgr_rst_curr_ymd[:] = self.timestamp.year*10000 + \ - self.timestamp.month*100+self.timestamp.day - for key, val in share.timemgr_rst_curr_ymd.__dict__.iteritems(): + timemgr_rst_curr_ymd[:] = self.timestamp.year * 10000 + \ + self.timestamp.month * 100 + self.timestamp.day + for key, val in iteritems(share.timemgr_rst_curr_ymd): if val: setattr(timemgr_rst_curr_ymd, key, val) timemgr_rst_curr_tod = f.createVariable('timemgr_rst_curr_tod', NC_DOUBLE, (), **self.ncvaropts) - timemgr_rst_curr_tod[:] = (self.time_ord % 1)*SECSPERDAY - for key, val in share.timemgr_rst_curr_tod.__dict__.iteritems(): + timemgr_rst_curr_tod[:] = (self.time_ord % 1) * SECSPERDAY + for key, val in iteritems(share.timemgr_rst_curr_tod): if val: setattr(timemgr_rst_curr_tod, key, val) # ------------------------------------------------------------ # # Setup Tape Dimensions coords = ('tapes', 'max_chars') - ntapes = f.createDimension(coords[0], len(history_restart_files)) - ntapes = f.createDimension(coords[1], MAX_NC_CHARS) + f.createDimension(coords[0], len(history_restart_files)) + f.createDimension(coords[1], MAX_NC_CHARS) # ------------------------------------------------------------ # # 
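The 'Write Fields' block that follows stores history filenames in a fixed-width NC_CHAR variable. The Python 3-safe pattern it relies on is sketched here, assuming only `numpy` and `netCDF4` (the filename is made up for illustration):

    import numpy as np
    from netCDF4 import stringtochar

    MAX_NC_CHARS = 256
    string = 'example.rvic.h0a.2000-01-01.nc'
    b_string = string.encode()  # str -> bytes, required on Python 3
    char_row = stringtochar(np.array(b_string.ljust(MAX_NC_CHARS)))
    # char_row has dtype 'S1' and length MAX_NC_CHARS, ready to be
    # assigned to one row of an NC_CHAR variable such as locfnh[i, :]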
------------------------------------------------------------ # # Write Fields locfnh = f.createVariable('locfnh', NC_CHAR, coords, **self.ncvaropts) for i, string in enumerate(current_history_files): - locfnh[i, :] = stringtochar(np.array(string.ljust(MAX_NC_CHARS))) + b_string = string.encode() + assert type(b_string) == bytes + locfnh[i, :] = stringtochar(np.array(b_string.ljust(MAX_NC_CHARS))) locfnh.long_name = 'History filename' - locfnh.comment = 'This variable is NOT needed for startup or branch simulations' + locfnh.comment = 'This variable is NOT needed for startup or branch '\ + 'simulations' locfnhr = f.createVariable('locfnhr', NC_CHAR, coords, **self.ncvaropts) for i, string in enumerate(history_restart_files): - locfnh[i, :] = stringtochar(np.array(string.ljust(MAX_NC_CHARS))) + b_string = string.encode() + assert type(b_string) == bytes + locfnhr[i, :] = stringtochar(np.array(b_string.ljust(MAX_NC_CHARS))) locfnhr.long_name = 'History restart filename' - locfnhr.comment = 'This variable NOT needed for startup or branch simulations' + locfnhr.comment = 'This variable NOT needed for startup or branch '\ + 'simulations' # ------------------------------------------------------------ # # ------------------------------------------------------------ # # Setup Point Dimensions coords = ('outlets', ) - outlets = f.createDimension(coords[0], self.n_outlets) + f.createDimension(coords[0], self.n_outlets) # ------------------------------------------------------------ # # ------------------------------------------------------------ # @@ -506,21 +518,21 @@ def write_restart(self, current_history_files, history_restart_files): oyi = f.createVariable('outlet_y_ind', NC_INT, coords[0], **self.ncvaropts) oyi[:] = self.outlet_y_ind - for key, val in share.outlet_y_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_y_ind): if val: setattr(oyi, key, val) oxi = f.createVariable('outlet_x_ind', NC_INT, coords[0], **self.ncvaropts) oxi[:] = self.outlet_x_ind - for key, val in share.outlet_x_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_x_ind): if val: setattr(oxi, key, val) odi = f.createVariable('outlet_decomp_ind', NC_INT, coords[0], **self.ncvaropts) odi[:] = self.outlet_decomp_ind - for key, val in share.outlet_decomp_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_decomp_ind): if val: setattr(odi, key, val) @@ -531,7 +543,7 @@ def write_restart(self, current_history_files, history_restart_files): NC_DOUBLE, tcoords, **self.ncvaropts) ring[:, :] = self.ring[tracer][:, :] - for key, val in share.ring.__dict__.iteritems(): + for key, val in iteritems(share.ring): if val: setattr(ring, key, val) # ------------------------------------------------------------ # @@ -540,7 +552,7 @@ def write_restart(self, current_history_files, history_restart_files): # write global attributes self.glob_atts.update() - for key, val in self.glob_atts.atts.iteritems(): + for key, val in iteritems(self.glob_atts.atts): if val: setattr(f, key, val) # ------------------------------------------------------------ # diff --git a/rvic/core/write.py b/rvic/core/write.py index 0853819..1913452 100644 --- a/rvic/core/write.py +++ b/rvic/core/write.py @@ -1,14 +1,15 @@ -""" +# -*- coding: utf-8 -*- +''' write.py -""" +''' import numpy as np from netCDF4 import Dataset, stringtochar from logging import getLogger -from log import LOG_NAME -from share import NC_DOUBLE, NC_INT, NC_CHAR, FILLVALUE_F, NcGlobals -import share - +from .log import
LOG_NAME +from .share import NC_DOUBLE, NC_INT, NC_CHAR, FILLVALUE_F, NcGlobals +from .pycompat import iteritems +from . import share # -------------------------------------------------------------------- # # create logger log = getLogger(LOG_NAME) @@ -17,13 +18,13 @@ # -------------------------------------------------------------------- # # Write the agg netcdf -def write_agg_netcdf(file_name, agg_data, glob_atts, format, zlib=True, +def write_agg_netcdf(file_name, agg_data, glob_atts, nc_format, zlib=True, complevel=4, least_significant_digit=None): - """ + ''' Write output to netCDF. Writes out a netCDF4 data file containing the UH_S and fractions and a full set of history and description attributes. - """ + ''' # ---------------------------------------------------------------- # # netCDF variable options ncvaropts = {'zlib': zlib, @@ -33,15 +34,15 @@ def write_agg_netcdf(file_name, agg_data, glob_atts, format, zlib=True, # ---------------------------------------------------------------- # # Open file - f = Dataset(file_name, 'w', format=format) - log.info('writing aggregated netcdf: %s' % file_name) + f = Dataset(file_name, 'w', format=nc_format) + log.info('writing aggregated netcdf: %s', file_name) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # set dimensions - timesteps = f.createDimension('timesteps', None) - lon = f.createDimension('lon', (len(agg_data['lon']))) - lat = f.createDimension('lat', (len(agg_data['lat']))) + f.createDimension('timesteps', None) + f.createDimension('lon', (len(agg_data['lon']))) + f.createDimension('lat', (len(agg_data['lat']))) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -49,40 +50,40 @@ def write_agg_netcdf(file_name, agg_data, glob_atts, format, zlib=True, unit_hydrograph_dt = f.createVariable('unit_hydrograph_dt', NC_INT, (), **ncvaropts) unit_hydrograph_dt[:] = agg_data['unit_hydrograph_dt'] - for key, val in share.unit_hydrograph_dt.__dict__.iteritems(): + for key, val in iteritems(share.unit_hydrograph_dt): if val: - setattr(unit_hydrograph_dt, key, val) + setattr(unit_hydrograph_dt, key, val.encode()) timesteps = f.createVariable('timesteps', NC_INT, ('timesteps',), **ncvaropts) - timesteps[:] = agg_data['timesteps'] - for key, val in share.timesteps.__dict__.iteritems(): + timesteps[:] = np.arange(agg_data['unit_hydrograph'].shape[0]) + for key, val in iteritems(share.timesteps): if val: - setattr(timesteps, key, val) + setattr(timesteps, key, val.encode()) lon = f.createVariable('lon', NC_DOUBLE, ('lon',), **ncvaropts) - for key, val in share.lon.__dict__.iteritems(): + for key, val in iteritems(share.lon): if val: - setattr(lon, key, val) + setattr(lon, key, val.encode()) lat = f.createVariable('lat', NC_DOUBLE, ('lat',), **ncvaropts) - for key, val in share.lat.__dict__.iteritems(): + for key, val in iteritems(share.lat): if val: - setattr(lat, key, val) + setattr(lat, key, val.encode()) fraction = f.createVariable('fraction', NC_DOUBLE, ('lat', 'lon',), **ncvaropts) - for key, val in share.fraction.__dict__.iteritems(): + for key, val in iteritems(share.fraction): if val: - setattr(fraction, key, val) + setattr(fraction, key, val.encode()) unit_hydrographs = f.createVariable('unit_hydrograph', NC_DOUBLE, ('timesteps', 'lat', 'lon',), fill_value=FILLVALUE_F, **ncvaropts) - for key, val in share.unit_hydrograph.__dict__.iteritems(): + for key, val in 
iteritems(share.unit_hydrograph): if val: - setattr(unit_hydrographs, key, val) + setattr(unit_hydrographs, key, val.encode()) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -95,9 +96,9 @@ def write_agg_netcdf(file_name, agg_data, glob_atts, format, zlib=True, # ---------------------------------------------------------------- # # write globals - for key, val in glob_atts.atts.iteritems(): + for key, val in iteritems(glob_atts.atts): if val: - setattr(f, key, val) + setattr(f, key, val.encode()) # ---------------------------------------------------------------- # f.close() @@ -129,13 +130,12 @@ def write_param_file(file_name, source_decomp_ind=None, source_time_offset=None, source2outlet_ind=None, - source_tracer=None, unit_hydrograph=None, zlib=True, complevel=4, least_significant_digit=None): - """ Write a standard RVIC Parameter file """ + '''Write a standard RVIC Parameter file ''' # ---------------------------------------------------------------- # # netCDF variable options @@ -157,20 +157,20 @@ def write_param_file(file_name, timesteps = f.createVariable('timesteps', NC_DOUBLE, ('timesteps',), **ncvaropts) timesteps[:] = np.arange(subset_length) - for key, val in share.timesteps.__dict__.iteritems(): + for key, val in iteritems(share.timesteps): if val: - setattr(timesteps, key, val) - timesteps.timestep_length = 'unit_hydrograph_dt' + setattr(timesteps, key, val.encode()) + timesteps.timestep_length = b'unit_hydrograph_dt' # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # write global attributes glob_atts.update() - for key, val in glob_atts.atts.iteritems(): + for key, val in iteritems(glob_atts.atts): if val: - setattr(f, key, val) - f.featureType = "timeSeries" + setattr(f, key, val.encode()) + f.featureType = b'timeSeries' # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -179,23 +179,23 @@ def write_param_file(file_name, # Full time length (size of ring) ftl = f.createVariable('full_time_length', NC_INT, (), **ncvaropts) ftl[:] = full_time_length - for key, val in share.full_time_length.__dict__.iteritems(): + for key, val in iteritems(share.full_time_length): if val: - setattr(ftl, key, val) + setattr(ftl, key, val.encode()) # Subset Length sl = f.createVariable('subset_length', NC_INT, (), **ncvaropts) sl[:] = subset_length - for key, val in share.subset_length.__dict__.iteritems(): + for key, val in iteritems(share.subset_length): if val: - setattr(sl, key, val) + setattr(sl, key, val.encode()) # UH timestep uh_dt = f.createVariable('unit_hydrograph_dt', NC_DOUBLE, (), **ncvaropts) uh_dt[:] = unit_hydrograph_dt - for key, val in share.unit_hydrograph_dt.__dict__.iteritems(): + for key, val in iteritems(share.unit_hydrograph_dt): if val: - setattr(uh_dt, key, val) + setattr(uh_dt, key, val.encode()) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -206,11 +206,11 @@ def write_param_file(file_name, else: numoutlets = len(outlet_lon) ocoords = ('outlets',) - outlets = f.createDimension(ocoords[0], numoutlets) + f.createDimension(ocoords[0], numoutlets) nocoords = ocoords + ('nc_chars',) char_names = stringtochar(outlet_name) - chars = f.createDimension(nocoords[1], char_names.shape[1]) + f.createDimension(nocoords[1], 
char_names.shape[1]) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -219,80 +219,80 @@ def write_param_file(file_name, # Outlet Cell Longitudes olon = f.createVariable('outlet_lon', NC_DOUBLE, ocoords, **ncvaropts) olon[:] = outlet_lon - for key, val in share.outlet_lon.__dict__.iteritems(): + for key, val in iteritems(share.outlet_lon): if val: - setattr(olon, key, val) + setattr(olon, key, val.encode()) # Outlet Cell Latitudes olat = f.createVariable('outlet_lat', NC_DOUBLE, ocoords, **ncvaropts) olat[:] = outlet_lat - for key, val in share.outlet_lat.__dict__.iteritems(): + for key, val in iteritems(share.outlet_lat): if val: - setattr(olat, key, val) + setattr(olat, key, val.encode()) # Outlet Cell X Indicies oxi = f.createVariable('outlet_x_ind', NC_INT, ocoords, **ncvaropts) oxi[:] = outlet_x_ind - for key, val in share.outlet_x_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_x_ind): if val: - setattr(oxi, key, val) + setattr(oxi, key, val.encode()) # Outlet Cell Y Indicies oyi = f.createVariable('outlet_y_ind', NC_INT, ocoords, **ncvaropts) oyi[:] = outlet_y_ind - for key, val in share.outlet_y_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_y_ind): if val: - setattr(oyi, key, val) + setattr(oyi, key, val.encode()) # Outlet Cell Decomp IDs odi = f.createVariable('outlet_decomp_ind', NC_INT, ocoords, **ncvaropts) odi[:] = outlet_decomp_ind - for key, val in share.outlet_decomp_ind.__dict__.iteritems(): + for key, val in iteritems(share.outlet_decomp_ind): if val: - setattr(odi, key, val) + setattr(odi, key, val.encode()) # Outlet Cell Number on = f.createVariable('outlet_number', NC_INT, ocoords, **ncvaropts) on[:] = outlet_number - for key, val in share.outlet_number.__dict__.iteritems(): + for key, val in iteritems(share.outlet_number): if val: - setattr(on, key, val) + setattr(on, key, val.encode()) # Outlet Mask om = f.createVariable('outlet_mask', NC_INT, ocoords, **ncvaropts) om[:] = outlet_mask - for key, val in share.outlet_mask.__dict__.iteritems(): + for key, val in iteritems(share.outlet_mask): if val: - setattr(om, key, val) + setattr(om, key, val.encode()) # Outlet Upstream area oua = f.createVariable('outlet_upstream_area', NC_DOUBLE, ocoords, **ncvaropts) oua[:] = outlet_upstream_area - for key, val in share.outlet_upstream_area.__dict__.iteritems(): + for key, val in iteritems(share.outlet_upstream_area): if val: - setattr(oua, key, val) + setattr(oua, key, val.encode()) # Outlet Upstream grid cells oug = f.createVariable('outlet_upstream_gridcells', NC_INT, ocoords, **ncvaropts) oug[:] = outlet_upstream_gridcells - for key, val in share.outlet_upstream_gridcells.__dict__.iteritems(): + for key, val in iteritems(share.outlet_upstream_gridcells): if val: - setattr(oug, key, val) + setattr(oug, key, val.encode()) # Outlet Names onm = f.createVariable('outlet_name', NC_CHAR, nocoords) onm[:, :] = char_names - for key, val in share.outlet_name.__dict__.iteritems(): + for key, val in iteritems(share.outlet_name): if val: - setattr(onm, key, val) + setattr(onm, key, val.encode()) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # Source Dimension scoords = ('sources',) - sources = f.createDimension(scoords[0], len(source_lon)) + f.createDimension(scoords[0], len(source_lon)) # ---------------------------------------------------------------- # # 
---------------------------------------------------------------- # @@ -301,69 +301,69 @@ def write_param_file(file_name, # Source Cell Longitudes slon = f.createVariable('source_lon', NC_DOUBLE, scoords, **ncvaropts) slon[:] = source_lon - for key, val in share.source_lon.__dict__.iteritems(): + for key, val in iteritems(share.source_lon): if val: - setattr(slon, key, val) + setattr(slon, key, val.encode()) # Source Cell Latitudes slat = f.createVariable('source_lat', NC_DOUBLE, scoords, **ncvaropts) slat[:] = source_lat - for key, val in share.source_lat.__dict__.iteritems(): + for key, val in iteritems(share.source_lat): if val: - setattr(slat, key, val) + setattr(slat, key, val.encode()) # Source Cell X Indicies sxi = f.createVariable('source_x_ind', NC_INT, scoords, **ncvaropts) sxi[:] = source_x_ind - for key, val in share.source_x_ind.__dict__.iteritems(): + for key, val in iteritems(share.source_x_ind): if val: - setattr(sxi, key, val) + setattr(sxi, key, val.encode()) # Source Cell Y Indicies syi = f.createVariable('source_y_ind', NC_INT, scoords, **ncvaropts) syi[:] = source_y_ind - for key, val in share.source_y_ind.__dict__.iteritems(): + for key, val in iteritems(share.source_y_ind): if val: - setattr(syi, key, val) + setattr(syi, key, val.encode()) # Source Cell Decomp IDs sdi = f.createVariable('source_decomp_ind', NC_INT, scoords, **ncvaropts) sdi[:] = source_decomp_ind - for key, val in share.source_decomp_ind.__dict__.iteritems(): + for key, val in iteritems(share.source_decomp_ind): if val: - setattr(sdi, key, val) + setattr(sdi, key, val.encode()) # Source Cell Time Offset sto = f.createVariable('source_time_offset', NC_INT, scoords, **ncvaropts) sto[:] = source_time_offset - for key, val in share.source_time_offset.__dict__.iteritems(): + for key, val in iteritems(share.source_time_offset): if val: - setattr(sto, key, val) + setattr(sto, key, val.encode()) # Source to Outlet Index Mapping s2o = f.createVariable('source2outlet_ind', NC_INT, scoords, **ncvaropts) s2o[:] = source2outlet_ind - for key, val in share.source2outlet_ind.__dict__.iteritems(): + for key, val in iteritems(share.source2outlet_ind): if val: - setattr(s2o, key, val) + setattr(s2o, key, val.encode()) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # 3-D Source Variables - uhcords = ('timesteps',) + scoords +('tracers',) - tracers = f.createDimension(uhcords[2], 1) + uhcords = ('timesteps',) + scoords + ('tracers',) + f.createDimension(uhcords[2], 1) # Unit Hydrographs uhs = f.createVariable('unit_hydrograph', NC_DOUBLE, uhcords, **ncvaropts) uhs[:, :] = unit_hydrograph - for key, val in share.unit_hydrograph.__dict__.iteritems(): + for key, val in iteritems(share.unit_hydrograph): if val: - setattr(uhs, key, val) + setattr(uhs, key, val.encode()) # ---------------------------------------------------------------- # f.close() - log.info('Finished writing %s' % file_name) + log.info('Finished writing %s', file_name) # ---------------------------------------------------------------- # # -------------------------------------------------------------------- # diff --git a/rvic/parameters.py b/rvic/parameters.py index a1e4bde..3123bc9 100644 --- a/rvic/parameters.py +++ b/rvic/parameters.py @@ -1,35 +1,49 @@ -""" +# -*- coding: utf-8 -*- +''' RVIC parameter file development driver -""" +''' import os import numpy as np import pandas as pd -from collections import OrderedDict from logging import getLogger -from core.log 
import init_logger, close_logger, LOG_NAME -from core.mpi import LoggingPool -from core.utilities import make_directories, copy_inputs, strip_invalid_char -from core.utilities import read_netcdf, tar_inputs, latlon2yx -from core.utilities import check_ncvars, clean_file, read_domain -from core.utilities import search_for_channel -from core.aggregate import make_agg_pairs, aggregate -from core.make_uh import rout -from core.share import NcGlobals -from core.write import write_agg_netcdf -from core.variables import Point -from core.param_file import finish_params -from core.config import read_config +from .core.log import init_logger, close_logger, LOG_NAME +from .core.multi_proc import error +from .core.utilities import make_directories, copy_inputs, strip_invalid_char +from .core.utilities import read_netcdf, tar_inputs, latlon2yx +from .core.utilities import check_ncvars, clean_file, read_domain +from .core.utilities import search_for_channel +from .core.aggregate import make_agg_pairs, aggregate +from .core.make_uh import rout +from .core.share import NcGlobals +from .core.write import write_agg_netcdf +from .core.variables import Point +from .core.param_file import finish_params +from .core.config import read_config +from .core.pycompat import OrderedDict, iteritems, pyrange, basestring try: - from core.remap import remap + from .core.remap import remap remap_available = True -except: +except ImportError: remap_available = False +# global multiprocessing results container +results = {} + # -------------------------------------------------------------------- # # Top level driver def parameters(config_file, numofproc=1): + ''' + Top-level driver for RVIC parameter generation. + + Parameters + ---------- + config_file : str + Path to RVIC parameters configuration file. + numofproc : int + Number of processors to use when developing RVIC parameters.
+ ''' # ---------------------------------------------------------------- # # Initilize @@ -45,23 +59,37 @@ def parameters(config_file, numofproc=1): # ---------------------------------------------------------------- # # Run if numofproc > 1: - pool = LoggingPool(processes=numofproc) - - for i, (cell_id, outlet) in enumerate(outlets.iteritems()): - log.info('On Outlet #{0} of {1}'.format(i+1, len(outlets))) - pool.apply_async(gen_uh_run, - args=(uh_box, fdr_data, fdr_vatts, dom_data, - outlet, config_dict, directories), - callback=store_result) + from multiprocessing import Pool + pool = Pool(processes=numofproc) + status = [] + + for i, (key, outlet) in enumerate(iteritems(outlets)): + log.info('On Outlet #%s of %s', i + 1, len(outlets)) + stat = pool.apply_async(gen_uh_run, + (uh_box, fdr_data, fdr_vatts, + dom_data, outlet, config_dict, + directories), + callback=store_result, + error_callback=error) + # Store the result + status.append(stat) + + # Close the pool pool.close() + + # Check that everything worked + [stat.get() for stat in status] + pool.join() - outlets = OrderedDict(sorted(results.items(), key=lambda t: t[0])) + outlets = OrderedDict(sorted(results.items(), reverse=True)) else: - for name, outlet in outlets.iteritems(): + for i, (key, outlet) in enumerate(iteritems(outlets)): outlet = gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, directories) + if not outlets: + raise ValueError('outlets in parameters are empty') # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -73,7 +101,40 @@ def parameters(config_file, numofproc=1): def gen_uh_init(config_file): - """Initialize RVIC parameter""" + '''Initialize RVIC parameters script. + + This function: + - Reads the configuration file + - Sets up the RVIC case directories + - Copies all input files to the case directory + - Initializes the logging + - Reads the pour-points, uh-box, FDR, and domain files + - Aggregates pour points into outlet grid cells + + Parameters + ---------- + config_file : str + Path to RVIC parameters configuration file. + + Returns + ---------- + uh_box : numpy.ndarray + UH-box array. + fdr_data : dict + Dictionary of arrays of flow direction, velocity, diffusion, etc. + This dictionary includes all the variables from the FDR netCDF file. + fdr_vatts : dict + Dictionary of attributes from the FDR netCDF file. + dom_data : dict + Dictionary of arrays of mask, fraction, lats, lons, etc. + This dictionary includes all the variables from the domain netCDF file. + outlets : dict + Dictionary of outlet `Point` objects. + config_dict : dict + Dictionary of values from the configuration file. + directories : dict + Dictionary of directories created by this function. 
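The `Pool.apply_async` fan-out used in `parameters` above reduces to the following skeleton (the worker and keys are illustrative stand-ins for `gen_uh_run` and the outlet ids; note that the `error_callback` argument used in the patch exists only on Python 3):

    from multiprocessing import Pool

    results = {}

    def store_result(result):  # runs in the parent process
        key, value = result
        results[key] = value

    def worker(key):           # stand-in for gen_uh_run
        return key, key ** 2

    if __name__ == '__main__':
        pool = Pool(processes=2)
        status = [pool.apply_async(worker, (k,), callback=store_result)
                  for k in range(4)]
        pool.close()
        [stat.get() for stat in status]  # re-raises worker exceptions
        pool.join()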
+ ''' # ---------------------------------------------------------------- # # Read Configuration files @@ -115,22 +176,22 @@ def gen_uh_init(config_file): try: pour_points = pd.read_csv(config_dict['POUR_POINTS']['FILE_NAME'], comment='#') - log.info('Opened Pour Points File: ' - '{0}'.format(config_dict['POUR_POINTS']['FILE_NAME'])) - if not (all(x in pour_points.keys() for x in ['lons', 'lats']) or - all(x in pour_points.keys() for x in ['x', 'y'])): + log.info('Opened Pour Points File: %s', + config_dict['POUR_POINTS']['FILE_NAME']) + if not (all(x in list(pour_points.keys()) for x in ['lons', 'lats']) or + all(x in list(pour_points.keys()) for x in ['x', 'y'])): raise ValueError('Pour Points File must include ' 'variables (lons, lats) or (x, y)') if 'names' in pour_points: pour_points.fillna(inplace=True, value='unknown') for i, name in enumerate(pour_points.names): - pour_points.names[i] = strip_invalid_char(name) + pour_points.ix[i, 'names'] = strip_invalid_char(name) pour_points.drop_duplicates(inplace=True) pour_points.dropna() except Exception as e: - log.error('Error opening pour points file: ' - '{0}'.format(config_dict['POUR_POINTS']['FILE_NAME'])) + log.error('Error opening pour points file: %s', + config_dict['POUR_POINTS']['FILE_NAME']) log.exception(e) raise # ---------------------------------------------------------------- # @@ -145,11 +206,11 @@ def gen_uh_init(config_file): skip_header=uh_header, delimiter=',', unpack=True) - log.info('Opened UHbox File: ' - '{0}'.format(config_dict['UH_BOX']['FILE_NAME'])) + log.info('Opened UHbox File: %s', + config_dict['UH_BOX']['FILE_NAME']) except: - log.exception('Error opening uh_box file: ' - '{0}'.format(config_dict['POUR_POINTS']['FILE_NAME'])) + log.exception('Error opening uh_box file: %s', + config_dict['UH_BOX']['FILE_NAME']) raise # ---------------------------------------------------------------- # @@ -173,9 +234,9 @@ remove_vars = [] - for var, data in fdr_data.iteritems(): + for var, data in iteritems(fdr_data): log.debug('flipping %s', var) - if data.ndim >= 1 and var != fdr_lon: + if data.ndim >= 1 and var != fdr_lon: fdr_data[var] = np.flipud(data) elif data.ndim == 0: remove_vars.append(var) @@ -187,12 +248,14 @@ # ---------------------------------------------------------------- # # Add velocity and/or diffusion grids if not present yet - if not type(fdr_vel) == str: - fdr_data['velocity'] = np.zeros(fdr_shape) + fdr_vel + if not isinstance(fdr_vel, basestring): + fdr_data['velocity'] = \ + np.zeros(fdr_shape, dtype=np.float64) + fdr_vel config_dict['ROUTING']['VELOCITY'] = 'velocity' log.info('Added velocity grid to fdr_data') - if not type(fdr_dif) == str: - fdr_data['diffusion'] = np.zeros(fdr_shape) + fdr_dif + if not isinstance(fdr_dif, basestring): + fdr_data['diffusion'] = \ + np.zeros(fdr_shape, dtype=np.float64) + fdr_dif config_dict['ROUTING']['DIFFUSION'] = 'diffusion' log.info('Added diffusion grid to fdr_data') if ('SOURCE_AREA_VAR' not in config_dict['ROUTING'] or @@ -201,16 +264,16 @@ 'source area will be zero.') config_dict['ROUTING']['SOURCE_AREA_VAR'] = 'src_area' fdr_data[config_dict['ROUTING']['SOURCE_AREA_VAR']] = \ - np.zeros(fdr_shape) + np.zeros(fdr_shape, dtype=np.float64) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # fdr_data['resolution'] = np.abs(fdr_data[fdr_lon][1] - fdr_data[fdr_lon][0]) -
check_ncvars(config_dict['ROUTING'], list(fdr_data.keys())) # ---------------------------------------------------------------- # - log.info('Opened FDR File: {0}'.format(fdr_file)) + log.info('Opened FDR File: %s', fdr_file) except: log.exception('Error opening FDR file') raise @@ -220,9 +283,8 @@ # ---------------------------------------------------------------- # # Read domain file domain = config_dict['DOMAIN'] - dom_data, _, _ = read_domain(domain) - log.info('Opened Domain File: ' - '{0}'.format(domain['FILE_NAME'])) + dom_data = read_domain(domain)[0] + log.info('Opened Domain File: %s', domain['FILE_NAME']) # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -232,6 +294,16 @@ log.error('RVIC parameter generation requires REMAP option to be True' ' if AGGREGATE is True') raise ValueError('Invalid option') + + # If remap is False, then the resolution needs to match the routing data + if not options['REMAP']: + domain_res = np.abs(dom_data[domain['LONGITUDE_VAR']][0, 1] - + dom_data[domain['LONGITUDE_VAR']][0, 0]) + if not np.isclose(fdr_data['resolution'], domain_res): + log.error('routing grid resolution: %s', fdr_data['resolution']) + log.error('domain grid resolution: %s', domain_res) + raise ValueError('If remap is false, domain and routing grid ' + 'resolutions must match.') # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # @@ -243,13 +315,14 @@ 'pour points and outlet grid cells') else: - outlets = {} - if all(x in pour_points.keys() for x in ['x', 'y', 'lons', 'lats']): + outlets = OrderedDict() + if all(x in list(pour_points.keys()) for x in ['x', 'y', + 'lons', 'lats']): lats = pour_points['lats'].values lons = pour_points['lons'].values routys = pour_points['y'].values routxs = pour_points['x'].values - elif all(x in pour_points.keys() for x in ['x', 'y']): + elif all(x in list(pour_points.keys()) for x in ['x', 'y']): # use x and y (assume from routing inputs grid) # find lons and lats from xs and ys routys = pour_points['y'].values @@ -257,7 +330,7 @@ lats = fdr_data[fdr_lat][routys] lons = fdr_data[fdr_lon][routxs] else: - # use lons and lats to find xs and ys + # use lons and lats to find xs and ys lats = pour_points['lats'].values lons = pour_points['lons'].values @@ -270,7 +343,7 @@ if options['SEARCH_FOR_CHANNEL']: routys, routxs = search_for_channel( fdr_data[config_dict['ROUTING']['SOURCE_AREA_VAR']], - routys, routxs, tol=10, search=2) + routys, routxs, tol=10, search=5) # update lats and lons lats = fdr_data[fdr_lat][routys] @@ -282,10 +355,10 @@ glats=dom_data[domain['LATITUDE_VAR']], glons=dom_data[domain['LONGITUDE_VAR']]) - for i in xrange(len(lats)): - if 'names' in pour_points.keys(): + for i in pyrange(len(lats)): + if 'names' in list(pour_points.keys()): name = pour_points['names'].values[i] - name = name.replace("'", '').replace(" ", "_") + name = name.replace("'", '').replace(' ', '_') else: # fill name filed with p-outlet_num name = 'p-{0}'.format(i) @@ -297,8 +370,7 @@ routx=routxs[i], routy=routys[i], name=name, - cell_id=dom_data['cell_ids'][domys[i], - domxs[i]]) + cell_id=dom_data['cell_ids'][domys[i], domxs[i]])
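For reference, the nearest-cell lookup that `latlon2yx` performs for each pour point reduces to a cKDTree query over the flattened grid coordinates. A small self-contained sketch (the grid shape and pour point are illustrative; the real function also normalizes longitude conventions first):

    import numpy as np
    from scipy.spatial import cKDTree

    glats, glons = np.meshgrid(np.arange(40., 50.),
                               np.arange(-125., -115.), indexing='ij')
    combined = np.column_stack((glats.ravel(), glons.ravel()))
    tree = cKDTree(combined)

    points = [(45.2, -120.7)]             # one pour point (lat, lon)
    indexes = tree.query(points, k=1)[1]  # index of the nearest cell
    y, x = np.unravel_index(indexes, glons.shape)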
outlets[i].pour_points = [outlets[i]] # ---------------------------------------------------------------- # @@ -314,19 +386,42 @@ def gen_uh_init(config_file): def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, directories): - """ - Run Genuh_run - """ + ''' + Develop unit hydrographs for one outlet `Point`. + + Parameters + ---------- + uh_box : numpy.ndarray + UH-box array. + fdr_data : dict + Dictionary of arrays of flow direction, velocity, diffusion, etc. + This dictionary includes all the variables from the FDR netCDF file. + fdr_vatts : dict + Dictionary of attributes from the FDR netCDF file. + dom_data : dict + Dictionary of arrays of mask, fraction, lats, lons, etc. + This dictionary includes all the variables from the domain netCDF file. + outlet : Point + Outlet Point + config_dict : dict + Dictionary of values from the configuration file. + directories : dict + Dictionary of directories created by gen_uh_init. + + Returns + ---------- + outlet: Point + Point object with unit hydrographs. + ''' log = getLogger(LOG_NAME) - log.info('Running outlet cell id {0}'.format(outlet.cell_id)) + log.info('Running outlet cell id %s', outlet.cell_id) agg_data = {} domain = config_dict['DOMAIN'] dom_lat = domain['LATITUDE_VAR'] dom_lon = domain['LONGITUDE_VAR'] dom_mask = domain['LAND_MASK_VAR'] - options = config_dict['OPTIONS'] # ------------------------------------------------------------ # @@ -342,10 +437,10 @@ def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, # ------------------------------------------------------------ # # Loop over pour points + n_pour_points = len(outlet.pour_points) for j, pour_point in enumerate(outlet.pour_points): - log.info('On pour_point #{0} of' - ' {1}'.format(j+1, len(outlet.pour_points))) + log.info('On pour_point #%s of %s', j + 1, n_pour_points) # -------------------------------------------------------- # # Make the Unit Hydrograph Grid @@ -353,19 +448,17 @@ def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, config_dict['ROUTING']) log.debug('Done routing to pour_point') - log.debug('rout_data: {0}, ' - '{1}'.format(rout_data['unit_hydrograph'].min(), - rout_data['unit_hydrograph'].max())) - log.debug('rout_data sum: {0}, ' - '{1}'.format(rout_data['unit_hydrograph'].sum(), - rout_data['fraction'].sum())) + log.debug('rout_data: %s, %s', rout_data['unit_hydrograph'].min(), + rout_data['unit_hydrograph'].max()) + log.debug('rout_data sum: %s, %s', rout_data['unit_hydrograph'].sum(), + rout_data['fraction'].sum()) # -------------------------------------------------------- # # -------------------------------------------------------- # # aggregate if options['AGGREGATE']: - if j != len(outlet.pour_points)-1: + if j != len(outlet.pour_points) - 1: agg_data = aggregate(rout_data, agg_data, res=fdr_data['resolution']) else: @@ -374,9 +467,9 @@ def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, pad=options['AGG_PAD'], maskandnorm=True) - log.debug('agg_data: {0}, ' - '{1}'.format(agg_data['unit_hydrograph'].min(), - agg_data['unit_hydrograph'].max())) + log.debug('agg_data: %s, %s', + agg_data['unit_hydrograph'].min(), + agg_data['unit_hydrograph'].max()) else: agg_data = rout_data # -------------------------------------------------------- # @@ -384,45 +477,48 @@ def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, # ------------------------------------------------------------ # # write temporary file #1 if options['REMAP']: - glob_atts = 
NcGlobals(title='RVIC Unit Hydrograph Grid File', - RvicPourPointsFile=os.path.split(config_dict['POUR_POINTS']['FILE_NAME'])[1], - RvicUHFile=os.path.split(config_dict['UH_BOX']['FILE_NAME'])[1], - RvicFdrFile=os.path.split(config_dict['ROUTING']['FILE_NAME'])[1], - RvicDomainFile=os.path.split(domain['FILE_NAME'])[1]) - - temp_file_1 = os.path.join(directories['aggregated'], - 'aggUH_{0}.nc'.format(outlet.name.replace(" ", "_"))) + glob_atts = NcGlobals( + title='RVIC Unit Hydrograph Grid File', + RvicPourPointsFile=os.path.split( + config_dict['POUR_POINTS']['FILE_NAME'])[1], + RvicUHFile=os.path.split(config_dict['UH_BOX']['FILE_NAME'])[1], + RvicFdrFile=os.path.split(config_dict['ROUTING']['FILE_NAME'])[1], + RvicDomainFile=os.path.split(domain['FILE_NAME'])[1]) + + temp_file_1 = os.path.join( + directories['aggregated'], + 'aggUH_{0}.nc'.format(outlet.name.replace(' ', '_'))) write_agg_netcdf(temp_file_1, agg_data, glob_atts, options['NETCDF_FORMAT'], **ncvaropts) # -------------------------------------------------------- # # Remap temporary file #1 to temporary file #2 - temp_file_2 = os.path.join(directories['remapped'], - 'remapUH_{0}.nc'.format(outlet.name.replace(" ", "_"))) + temp_file_2 = os.path.join( + directories['remapped'], + 'remapUH_{0}.nc'.format(outlet.name.replace(' ', '_'))) remap(domain['FILE_NAME'], temp_file_1, temp_file_2) # -------------------------------------------------------- # # Read temporary file #2 - final_data, _, _ = read_netcdf(temp_file_2, - variables=['unit_hydrograph', - 'fraction', - dom_lat]) + final_data = read_netcdf( + temp_file_2, variables=['unit_hydrograph', 'fraction', dom_lat])[0] # -------------------------------------------------------- # # Check latitude order, flip if necessary. if final_data[dom_lat].ndim == 1: if final_data[dom_lat][-1] > final_data[dom_lat][0]: - var_list = final_data.keys() + var_list = list(final_data.keys()) - log.debug('Remapped inputs came in upside down, flipping {0}' - ' now.'.format(", ".join(var_list))) + log.debug('Remapped inputs came in upside down, flipping %s', + ', '.join(var_list)) # flip lattiutude and fraction along y axis (axis 0) final_data[dom_lat] = final_data[dom_lat][::-1] final_data['fraction'] = final_data['fraction'][::-1, :] # flip unit hydrograph along y axis (axis 1) - final_data['unit_hydrograph'] = final_data['unit_hydrograph'][:, ::-1, :] + final_data['unit_hydrograph'] = \ + final_data['unit_hydrograph'][:, ::-1, :] assert dom_data['cord_lats'][0] == final_data[dom_lat][0] # -------------------------------------------------------- # @@ -433,36 +529,40 @@ def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict, clean_file(temp_file_2) # -------------------------------------------------------- # - # -------------------------------------------------------- # - # Set the domain index offest to zero and use the remapped fraction - # as the final fractions - y0, x0 = 0, 0 - final_fracs = final_data['fraction'] - # -------------------------------------------------------- # else: # -------------------------------------------------------- # # Put the agg data back onto the original grid - final_data = agg_data - final_fracs = np.zeros_like(fdr_data['velocity'], - dtype=np.float64) - x0 = final_data['dom_x_min'] - x1 = final_data['dom_x_max'] - y0 = final_data['dom_y_min'] - y1 = final_data['dom_y_max'] - final_fracs[y0:y1, x0:x1] = final_data['fraction'] + uh_shape = (agg_data['unit_hydrograph'].shape[0], ) + \ + dom_data[dom_mask].shape + final_data = {} + 
final_data['unit_hydrograph'] = np.zeros(uh_shape, dtype=np.float64)
+            final_data['fraction'] = np.zeros(dom_data[dom_mask].shape,
+                                              dtype=np.float64)
+
+            bys, bxs = np.nonzero(agg_data['fraction'])
+
+            ys = bys + outlet.domy - agg_data['basiny']
+            xs = bxs + outlet.domx - agg_data['basinx']
+
+            if (ys < 0).any() or (xs < 0).any():
+                raise ValueError('Negative indices found when mapping '
+                                 '`non-remapped` rout_data to domain grid.')
+
+            final_data['unit_hydrograph'][:, ys, xs] = \
+                agg_data['unit_hydrograph'][:, bys, bxs]
+            final_data['fraction'][ys, xs] = agg_data['fraction'][bys, bxs]
         # -------------------------------------------------------- #
     # ------------------------------------------------------------ #

     # ------------------------------------------------------------ #
-    # Add to "adjust fractions structure"
-    y, x = np.nonzero((final_fracs > 0.0) * (dom_data[dom_mask] == 1))
-    yi = y - y0
-    xi = x - x0
+    # Add to 'adjust fractions structure'
+    y, x = np.nonzero((final_data['fraction'] > 0.) *
+                      (dom_data[dom_mask] > np.finfo(np.float).resolution))

     # From final data
     outlet.time = np.arange(final_data['unit_hydrograph'].shape[0])
-    outlet.fractions = final_data['fraction'][yi, xi]
-    outlet.unit_hydrograph = final_data['unit_hydrograph'][:, yi, xi]
+    outlet.fractions = final_data['fraction'][y, x]
+    outlet.unit_hydrograph = final_data['unit_hydrograph'][:, y, x]

     # From domain data
     outlet.lon_source = dom_data[dom_lon][y, x]
@@ -476,13 +576,28 @@ def gen_uh_run(uh_box, fdr_data, fdr_vatts, dom_data, outlet, config_dict,


 def gen_uh_final(outlets, dom_data, config_dict, directories):
-    """
+    '''
     Make the RVIC Parameter File
-    """
+
+    Parameters
+    ----------
+    outlets : dict
+        Dictionary of outlet `Point` objects.
+    dom_data : dict
+        Dictionary of arrays of mask, fraction, lats, lons, etc.
+        This dictionary includes all the variables from the domain netCDF file.
+    config_dict : dict
+        Dictionary of values from the configuration file.
+    directories : dict
+        Dictionary of directories created by gen_uh_init.
+    '''
     log = getLogger(LOG_NAME)
     log.info('In gen_uh_final')

+    if not len(outlets) > 0:
+        raise ValueError('outlets in gen_uh_final are empty')
+
     # ---------------------------------------------------------------- #
     # Write the parameter file
     param_file, today = finish_params(outlets, dom_data, config_dict,
@@ -508,8 +623,21 @@ def gen_uh_final(outlets, dom_data, config_dict, directories):

 # -------------------------------------------------------------------- #
 # store_result helper function
 def store_result(result):
-    # This is called whenever foo_pool(i) returns a result.
-    # result_list is modified only by the main process, not the pool workers.
+    '''
+    Store values returned by a multiprocessing.pool member.
+
+    This is called whenever foo_pool(i) returns a result.
+    result_list is modified only by the main process, not the pool workers.
+
+    Parameters
+    ----------
+    result : object
+        Result to append to the global `results` container
+
+    Globals
+    ----------
+    results : dict
+        Global results container for multiprocessing results to be appended to.
+ ''' results[result.cell_id] = result # -------------------------------------------------------------------- # -results = {} diff --git a/rvic/version.py b/rvic/version.py new file mode 100644 index 0000000..e284ae3 --- /dev/null +++ b/rvic/version.py @@ -0,0 +1,2 @@ +version = '1.1.0' +short_version = '1.1.0' diff --git a/config/rvic_convolution_example.cfg b/samples/configs/rvic.convolution.rasm.cfg similarity index 90% rename from config/rvic_convolution_example.cfg rename to samples/configs/rvic.convolution.rasm.cfg index e3a3ca8..ac2d35e 100644 --- a/config/rvic_convolution_example.cfg +++ b/samples/configs/rvic.convolution.rasm.cfg @@ -19,13 +19,10 @@ VERBOSE:True #-- ====================================== --# #--case run directory (char) --# -CASE_DIR : /path/to/%(CASEID)s/ - -#--RVIC tag (char) --# -RVIC_TAG : 1.0beta +CASE_DIR : ./samples/cases/%(CASEID)s/ #--case description (char) --# -CASEID : testing +CASEID : sample_rasm_convolution #--case description (char) --# CASESTR : a simple test run @@ -39,7 +36,7 @@ CALENDAR : noleap RUN_TYPE : drystart #--Run start date (yyyy-mm-dd-hh). Only used for startup and drystart runs (char) --# -RUN_STARTDATE : 1989-01-01-01 +RUN_STARTDATE : 1979-09-01-00 #-- ====================================== --# @@ -106,7 +103,7 @@ RVICHIST_OUTTYPE: grid, array RVICHIST_NCFORM: NETCDF4_CLASSIC, NETCDF4_CLASSIC #-- Output parameter file compression options --# -# Descriptions of these options can be found in +# Descriptions of these options can be found in RVICHIST_NETCDF_ZLIB: False RVICHIST_NETCDF_COMPLEVEL: 4 RVICHIST_NETCDF_SIGFIGS: None @@ -119,7 +116,7 @@ RVICHIST_UNITS: m3/s, m3/s [DOMAIN] #--rof domain file (char) --> -FILE_NAME: /path/to/domain.lnd.nc +FILE_NAME: ./samples/domains/domain.lnd.wr50a_ar9v4.100920.nc LONGITUDE_VAR: xc LATITUDE_VAR: yc AREA_VAR: area @@ -136,24 +133,24 @@ FILE_NAME: None [PARAM_FILE] #--rvic parameter file file (char) --> -FILE_NAME:/path/to/rvic.prm.nc +FILE_NAME: ./samples/cases/sample_rasm_parameters/params/sample_rasm_parameters.rvic.prm.wr50a.20151024.nc #-- ====================================== --# [INPUT_FORCINGS] -DATL_PATH:/path/to/ +DATL_PATH: ./samples/forcings/ # prfix.$YYYY[-$MM-[$DD[-$HH]]].nc -DATL_FILE: inputs.lnd.h.$YYYY-$MM.nc +DATL_FILE: rasm_sample_runoff.nc #--variable names (char) --> TIME_VAR: time -LATITUDE_VAR: lat -DATL_LIQ_FLDS: l2x_Flrl_rofliq +LATITUDE_VAR: yc +DATL_LIQ_FLDS: Runoff, Baseflow #--start date, date formate YYYY[-MM[-DD]] (char) --> -START:1989-01 +START: #--end date, date formate YYYY[-MM[-DD]] (char) --> -END:1989-12 +END: #-- ====================================== --# diff --git a/samples/configs/rvic.parameters.pnw.cfg b/samples/configs/rvic.parameters.pnw.cfg new file mode 100644 index 0000000..a61483a --- /dev/null +++ b/samples/configs/rvic.parameters.pnw.cfg @@ -0,0 +1,133 @@ +#-- ========================================================================== --# +#-- --# +#-- This RVIC namelist contains options and paths for the --# +#-- development of the RVIC parameter file. --# +#-- --# +#-- --# +#-- ========================================================================== --# + +# Note: namelist is parsed by the python ConfigParser module. %(Interploation) is +# supported inside [sections] only. 
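As the note above says, these sample configs rely on ConfigParser string interpolation: a `%(KEY)s` reference resolves against other keys in the same section (plus `DEFAULT`). A minimal sketch of how `CASE_DIR` expands, using the Python 3 `configparser` module (Python 2 used `SafeConfigParser`, which `rvic.core.pycompat` papers over):

```python
from configparser import ConfigParser

cfg_text = '''
[OPTIONS]
CASEID: sample_pnw_parameters
CASE_DIR: ./samples/cases/%(CASEID)s
'''

parser = ConfigParser()
parser.read_string(cfg_text)

# %(CASEID)s is resolved against keys within the [OPTIONS] section
print(parser.get('OPTIONS', 'CASE_DIR'))
# -> ./samples/cases/sample_pnw_parameters
```

The same mechanism is why `TEMP_DIR:%(CASE_DIR)s/temp/` in the [OPTIONS] section below resolves, while a reference to a key in a different section would not.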
+ +[OPTIONS] +#-- ====================================== --# +#--Level to log output at (char) --# +# valid values: DEBUG, INFO, WARNING, ERROR, CRITICAL +LOG_LEVEL:DEBUG + +#--Print output to console in addition to the log file (bool) --# +# valid values: True, False +VERBOSE:True + +#--Delete temporary files, only used if REMAP=True (bool) --# +# valid values: True, False +CLEAN: False + +#--case description (char) --# +CASEID: sample_pnw_parameters + +#--routing domain grid shortname (char) --# +GRIDID: pnw + +#--case run directory (char) --# +CASE_DIR: ./samples/cases/%(CASEID)s + +#--Directory to use for temporary read/write operations (char) --# +TEMP_DIR:%(CASE_DIR)s/temp/ + +#--Remap Unit Hydrographs from [ROUTING] grid to [DOMAIN] grid (bool) --# +# valid values: True, False +REMAP:False + +#--Aggregate all [POUR_POINTS] inside each [DOMAIN] grid cell (bool) --# +# This should only be used when routing to coastal grid cells for CESM +AGGREGATE:False + +#--Size of pad to add to aggregated files prior to remapping (int) --# +AGG_PAD:25 + +#-- Output parameter file format (char) --# +# Valid Values: NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, and NETCDF4 +# For use with CESM, NETCDF3_CLASSIC is recommended. +NETCDF_FORMAT:NETCDF3_CLASSIC + +#-- Output parameter file compression options --# +# Descriptions of these options can be found in +NETCDF_ZLIB: False +NETCDF_COMPLEVEL: 4 +NETCDF_SIGFIGS: None + +#-- Length of unit hydrograph subset in days (int) --# +SUBSET_DAYS:10 + +#-- Constrain the final unit hydrographs sum to be less than or equal to the domain fractions --# +# True when routing to coastal grid cells, else False +CONSTRAIN_FRACTIONS: False + +SEARCH_FOR_CHANNEL: True + +#-- ====================================== --# + +[POUR_POINTS] +#-- ====================================== --# +#-- Path to Pour Points File (char) --# +# A comma separated file of outlets to route to [lons, lats] - one coordinate pair per line (order not important) +# May optionally include a column [names] - which will (if not aggregating) be included in param file +FILE_NAME: ./samples/pour_points/columbia_sample_pour_points.csv + +#-- ====================================== --# + +[UH_BOX] +#-- ====================================== --# +#-- Path to UH Box File (char) --# +# This defines the unit hydrograph to rout flow to the edge of each grid cell. +# A comma separated file of [time in seconds, unit hydrograph ordinate] - one timestep per line +# The timestep should be 1hr (3600 sec) or less. +FILE_NAME: ./samples/uh_box/UH_Columbia_hourly.csv + +#-- Number of Header lines to ignore in [UH_BOX]FILE_NAME (INT) --# +HEADER_LINES = 1 +#-- ====================================== --# + +[ROUTING] +#-- ====================================== --# +#-- Path to routing inputs netcdf (char) --# +FILE_NAME: ./samples/flow_directions/pnw.RVIC.input_20140218.nc + +#-- netCDF Variable Names --# +LONGITUDE_VAR: lon +LATITUDE_VAR: lat +FLOW_DISTANCE_VAR: Flow_Distance +FLOW_DIRECTION_VAR: Flow_Direction +BASIN_ID_VAR: Basin_ID +SOURCE_AREA_VAR: Source_Area + +#-- Velocity and diffusion --# +# The velocity and diffusion parameters may either be specified as variables in +# the routing netcdf (char) or as a single value (float or int) +VELOCITY: 1 +DIFFUSION: 2000 + +#-- Output Interval --# +# Timestep of output unit hydrographs. 
Must be a multiple of the timestep in the UH_BOX +OUTPUT_INTERVAL:86400 + +#-- Maximum time for runoff to reach outlet (days) --# +BASIN_FLOWDAYS:50 + +#-- Maximum time for runoff to pass through a grid cell (days) --# +CELL_FLOWDAYS:2 +#-- ====================================== --# + +[DOMAIN] +#-- ====================================== --# +#-- Path to cesm complient domain file (char) --# +FILE_NAME: ./samples/domains/domain.lnd.bpa304.20140311.nc + +#-- netCDF Variable Names --# +LONGITUDE_VAR: lon +LATITUDE_VAR: lat +LAND_MASK_VAR: mask +FRACTION_VAR: frac +AREA_VAR: area +#-- ====================================== --# diff --git a/config/rvic_parameters_example.cfg b/samples/configs/rvic.parameters.rasm.cfg similarity index 89% rename from config/rvic_parameters_example.cfg rename to samples/configs/rvic.parameters.rasm.cfg index 01f514f..3877a19 100644 --- a/config/rvic_parameters_example.cfg +++ b/samples/configs/rvic.parameters.rasm.cfg @@ -24,13 +24,13 @@ VERBOSE:True CLEAN:False #--case description (char) --# -CASEID:example2 +CASEID: sample_rasm_parameters #--routing domain grid shortname (char) --# -GRIDID: some grid +GRIDID: wr50a #--case run directory (char) --# -CASE_DIR:/path/to/%(CASEID)s/ +CASE_DIR: ./samples/cases/%(CASEID)s #--Directory to use for temporary read/write operations (char) --# TEMP_DIR:%(CASE_DIR)s/temp/ @@ -48,11 +48,11 @@ AGG_PAD:25 #-- Output parameter file format (char) --# # Valid Values: NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, and NETCDF4 -# For use with CESM, NETCDF3_CLASSIC is reccomended. +# For use with CESM, NETCDF3_CLASSIC is recommended. NETCDF_FORMAT:NETCDF3_CLASSIC #-- Output parameter file compression options --# -# Descriptions of these options can be found in +# Descriptions of these options can be found in NETCDF_ZLIB: False NETCDF_COMPLEVEL: 4 NETCDF_SIGFIGS: None @@ -62,7 +62,9 @@ SUBSET_DAYS:10 #-- Constrain the final unit hydrographs sum to be less than or equal to the domain fractions --# # True when routing to coastal grid cells, else False -CONSTRAIN_FRACTIONS:False +CONSTRAIN_FRACTIONS: True + +SEARCH_FOR_CHANNEL: False #-- ====================================== --# @@ -71,7 +73,7 @@ CONSTRAIN_FRACTIONS:False #-- Path to Pour Points File (char) --# # A comma separated file of outlets to route to [lons, lats] - one coordinate pair per line (order not important) # May optionally include a column [names] - which will (if not aggregating) be included in param file -FILE_NAME: /path/to/rout2points.csv +FILE_NAME: ./samples/pour_points/rasm_sample_pour_points.csv #-- ====================================== --# @@ -81,7 +83,7 @@ FILE_NAME: /path/to/rout2points.csv # This defines the unit hydrograph to rout flow to the edge of each grid cell. # A comma separated file of [time in seconds, unit hydrograph ordinate] - one timestep per line # The timestep should be 1hr (3600 sec) or less. -FILE_NAME: /path/to/UH_file.csv +FILE_NAME: ./samples/uh_box/UH_RASM_hourly.csv #-- Number of Header lines to ignore in [UH_BOX]FILE_NAME (INT) --# HEADER_LINES = 1 @@ -90,7 +92,7 @@ HEADER_LINES = 1 [ROUTING] #-- ====================================== --# #-- Path to routing inputs netcdf (char) --# -FILE_NAME: /path/to/routing_inputs.nc +FILE_NAME: ./samples/flow_directions/Wu_routing_inputs_060313.nc #-- netCDF Variable Names --# LONGITUDE_VAR: lon @@ -108,7 +110,7 @@ DIFFUSION: 2000 #-- Output Interval --# # Timestep of output unit hydrographs. 
Must be a multiple of the timestep in the UH_BOX -OUTPUT_INTERVAL:3600 +OUTPUT_INTERVAL:86400 #-- Maximum time for runoff to reach outlet (days) --# BASIN_FLOWDAYS:50 @@ -120,7 +122,7 @@ CELL_FLOWDAYS:2 [DOMAIN] #-- ====================================== --# #-- Path to cesm complient domain file (char) --# -FILE_NAME: /path/to/domain.lnd.nc +FILE_NAME: ./samples/domains/domain.lnd.wr50a_ar9v4.100920.nc #-- netCDF Variable Names --# LONGITUDE_VAR: xc diff --git a/samples/domains/domain.lnd.bpa304.20140311.nc b/samples/domains/domain.lnd.bpa304.20140311.nc new file mode 100644 index 0000000..0b656b2 --- /dev/null +++ b/samples/domains/domain.lnd.bpa304.20140311.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d80e82382b3b63e5ce85439b4f09ad6d37a8e0e9286055e45048e95420529a0e +size 1429444 diff --git a/samples/domains/domain.lnd.wr50a_ar9v4.100920.nc b/samples/domains/domain.lnd.wr50a_ar9v4.100920.nc new file mode 100644 index 0000000..650b14f --- /dev/null +++ b/samples/domains/domain.lnd.wr50a_ar9v4.100920.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8da151a4973ba9f5da754d6f9ac2f7ec0b3d3a00bfe48bcbe5903feb94c7203f +size 5639788 diff --git a/samples/flow_directions/Wu_routing_inputs_060313.nc b/samples/flow_directions/Wu_routing_inputs_060313.nc new file mode 100644 index 0000000..732480f --- /dev/null +++ b/samples/flow_directions/Wu_routing_inputs_060313.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f3aaa36c73babae9f658040b52b53ca7a1785c7a1fef562731f51259ec6d65c2 +size 9176972 diff --git a/samples/flow_directions/pnw.RVIC.input_20140218.nc b/samples/flow_directions/pnw.RVIC.input_20140218.nc new file mode 100644 index 0000000..0ae1a14 --- /dev/null +++ b/samples/flow_directions/pnw.RVIC.input_20140218.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d542a17c7699a003324df0ac59b878e0b9ff6b8f132d23dc035bf446e6155d61 +size 578248 diff --git a/samples/forcings/rasm_sample_runoff.nc b/samples/forcings/rasm_sample_runoff.nc new file mode 100644 index 0000000..20acbd3 --- /dev/null +++ b/samples/forcings/rasm_sample_runoff.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:391d25ede08bdfb9dac4375ae37d7a3f4dc02fb62bc72078a9ce52a2ebf61c91 +size 28862969 diff --git a/samples/pour_points/columbia_sample_pour_points.csv b/samples/pour_points/columbia_sample_pour_points.csv new file mode 100644 index 0000000..c76fda6 --- /dev/null +++ b/samples/pour_points/columbia_sample_pour_points.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce13e2314839c6c2a560db0381b32df6e09300f35a0f4591dc256e0690f9610e +size 518 diff --git a/samples/pour_points/rasm_sample_pour_points.csv b/samples/pour_points/rasm_sample_pour_points.csv new file mode 100644 index 0000000..2acaaab --- /dev/null +++ b/samples/pour_points/rasm_sample_pour_points.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16458c8ced7dd66fb6116a81adb42ce2742cfc4b97b5d6f6788b90ce6668e735 +size 289 diff --git a/samples/uh_box/UH_Columbia_hourly.csv b/samples/uh_box/UH_Columbia_hourly.csv new file mode 100644 index 0000000..a63327a --- /dev/null +++ b/samples/uh_box/UH_Columbia_hourly.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5d71cfae383a339a5f3168ef304fc571bf9f81b49417392be3f1881c3b030def +size 881 diff --git a/samples/uh_box/UH_RASM_hourly.csv b/samples/uh_box/UH_RASM_hourly.csv new file mode 100644 index 0000000..a63327a --- /dev/null +++ 
b/samples/uh_box/UH_RASM_hourly.csv
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d71cfae383a339a5f3168ef304fc571bf9f81b49417392be3f1881c3b030def
+size 881
diff --git a/scripts/rvic b/scripts/rvic
index b2d8f89..69ec3ea 100755
--- a/scripts/rvic
+++ b/scripts/rvic
@@ -41,12 +41,16 @@ def main():
     if args.script == 'parameters':
         from rvic.parameters import parameters
         parameters(args.config_file, numofproc=args.numofproc)
+    else:
+        if args.numofproc > 1:
+            print('{0} processors were specified but script {1} only '
+                  'accepts 1'.format(args.numofproc, args.script))
     if args.script == 'convolution':
         from rvic.convolution import convolution
-        convolution(args.config_file, numofproc=args.numofproc)
+        convolution(args.config_file)
     if args.script == 'convert':
         from rvic.convert import convert
-        convert(args.config_file, numofproc=args.numofproc)
+        convert(args.config_file)
     else:
         parser.print_help()
         return
diff --git a/setup.py b/setup.py
index 23d4b22..5fec1e1 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
- #!/usr/bin/env python
+#!/usr/bin/env python
 import os
 import re
 import sys
@@ -11,9 +11,9 @@ from distutils.extension import Extension

 MAJOR = 1
-MINOR = 0
-MICRO = 1
-ISRELEASED = False
+MINOR = 1
+MICRO = 0
+ISRELEASED = True
 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
 QUALIFIER = ''

@@ -23,10 +23,10 @@
 # -------------------------------------------------------------------- #
 def write_version_py(filename=None):
-    cnt = """\
+    cnt = '''\
 version = '%s'
 short_version = '%s'
-"""
+'''
     if not filename:
         filename = os.path.join(
             os.path.dirname(__file__), 'rvic', 'version.py')
@@ -52,7 +52,7 @@ def write_version_py(filename=None):
     for cmd in ['git', 'git.cmd']:
         try:
             pipe = subprocess.Popen(
-                [cmd, "describe", "--always", "--match", "v[0-9]*"],
+                [cmd, 'describe', '--always', '--match', 'v[0-9]*'],
                 stdout=subprocess.PIPE)
             (so, serr) = pipe.communicate()
             if pipe.returncode == 0:
@@ -77,11 +77,11 @@
     if sys.version_info[0] >= 3:
         rev = rev.decode('ascii')

-    if not rev.startswith('v') and re.match("[a-zA-Z0-9]{7,9}", rev):
+    if not rev.startswith('v') and re.match('[a-zA-Z0-9]{7,9}', rev):
         # partial clone, manually construct version string
         # this is the format before we started using git-describe
         # to get an ordering on dev version strings.
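The versioning logic in this setup.py hunk shells out to `git describe` and then normalizes the result. A condensed, standalone sketch of the same pattern (the full script above also retries with `git.cmd` for Windows and falls back to the hard-coded `VERSION` when git is unavailable):

```python
import subprocess

def git_version():
    # 'describe' returns the nearest v-prefixed tag when one exists;
    # '--always' falls back to a bare commit SHA in a partial clone.
    try:
        out = subprocess.check_output(
            ['git', 'describe', '--always', '--match', 'v[0-9]*'])
    except (OSError, subprocess.CalledProcessError):
        return None
    # strip the leading 'v' from tags of the form "vx.y.z"
    return out.decode('ascii').strip().lstrip('v')
```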
- rev = "v%s.dev-%s" % (VERSION, rev) + rev = 'v%s.dev-%s' % (VERSION, rev) # Strip leading v from tags format "vx.y.z" to get th version string FULLVERSION = rev.lstrip('v') @@ -101,14 +101,24 @@ def write_version_py(filename=None): 'original model of Lohmann, et al., 1996, Tellus, ' '48(A), 708-721', author='Joe Hamman', - author_email='jhamman@hydro.washington.edu', + author_email='jhamman1@uw.edu', + classifiers=['Development Status :: 4 - Beta', + 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', + 'Operating System :: OS Independent', + 'Intended Audience :: Science/Research', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Topic :: Scientific/Engineering'], install_requires=['scipy >= 0.13', 'numpy >= 1.8', - 'netCDF4 >= 1.0.6', 'matplotlib >= 1.3.1'], + 'netCDF4 >= 1.0.6', 'matplotlib >= 1.3.1', + 'pandas >= 0.15.1'], tests_require=['pytest >= 2.5.2'], - url='https://github.com/jhamman/RVIC', - test_suite='pytest.collector', + url='https://github.com/UW-Hydro/RVIC', packages=['rvic', 'rvic.core'], - platform=['any'], py_modules=['rvic.parameters', 'rvic.convolution', 'rvic.convert'], scripts=['scripts/rvic', 'tools/find_pour_points.py', 'tools/fraction2domain.bash'], diff --git a/tests/run_tests.py b/tests/run_tests.py index 63d3ad2..dbc46ff 100755 --- a/tests/run_tests.py +++ b/tests/run_tests.py @@ -1,43 +1,45 @@ #!/usr/bin/env python -"""RVIC command line testing interface""" +'''RVIC command line testing interface''' -from __future__ import print_function import os +import sys import textwrap import argparse import pytest import cProfile import pstats -import StringIO +import io +from rvic.core.pycompat import OrderedDict from rvic import convert, convolution, parameters from rvic.core.config import read_config +from rvic.core.pycompat import py3, iteritems if not os.environ.get('RVIC_TEST_DIR'): print('\n$RVIC_TEST_DIR not set.') - os.environ["RVIC_TEST_DIR"] = os.path.abspath(os.path.dirname(__file__)) + os.environ['RVIC_TEST_DIR'] = os.path.abspath(os.path.dirname(__file__)) print('Setting to run_tests.py dir: ' - '{0}\n'.format(os.environ["RVIC_TEST_DIR"])) + '{0}\n'.format(os.environ['RVIC_TEST_DIR'])) if not os.environ.get('WORKDIR'): print('\n$WORKDIR not set.') - os.environ["WORKDIR"] = os.environ["RVIC_TEST_DIR"] + os.environ['WORKDIR'] = os.environ['RVIC_TEST_DIR'] print('Setting to output run_tests.py dir to $WORKDIR: ' - '{0}\n'.format(os.environ["WORKDIR"])) + '{0}\n'.format(os.environ['WORKDIR'])) # -------------------------------------------------------------------- # def main(): - """ + ''' Run RVIC tests - """ + ''' # Parse arguments parser = argparse.ArgumentParser(description='Test script for RVIC') - parser.add_argument("test_set", type=str, - help="Test set to run", + parser.add_argument('test_set', type=str, + help='Test set to run', choices=['all', 'unit', 'examples'], default=['all'], nargs='+') - parser.add_argument("--examples", type=str, - help="examples configuration file", + parser.add_argument('--examples', type=str, + help='examples configuration file', default='examples/examples.cfg') args = parser.parse_args() @@ -45,16 +47,17 @@ def main(): if any(i in ['all', 'unit'] for i in args.test_set): # run unit tests - pytest.main('-x unit') + exit_code = pytest.main('-x unit') if any(i in ['all', 'examples'] for i in args.test_set): - 
run_examples(args.examples) + exit_code = run_examples(args.examples) + sys.exit(exit_code) return # -------------------------------------------------------------------- # # -------------------------------------------------------------------- # def run_examples(config_file): - """ Run examples from config file """ + ''' Run examples from config file ''' # ---------------------------------------------------------------- # # Read Configuration files config_dict = read_config(config_file) @@ -62,15 +65,17 @@ def run_examples(config_file): # ---------------------------------------------------------------- # # run tests - num_tests = len(config_dict.keys()) + num_tests = len(list(config_dict.keys())) - for i, (test, test_dict) in enumerate(config_dict.iteritems()): - print("".center(100, '-')) - print("Starting Test #{0} of {1}: {2}".format(i+1, num_tests, + test_outcomes = OrderedDict() + + for i, (test, test_dict) in enumerate(iteritems(config_dict)): + print(''.center(100, '-')) + print('Starting Test #{0} of {1}: {2}'.format(i + 1, num_tests, test).center(100)) - desc = textwrap.fill(", ".join(test_dict['description']), 100) - print("Description: {0}".format(desc)) - print("".center(100, '-')) + desc = textwrap.fill(', '.join(test_dict['description']), 100) + print('Description: {0}'.format(desc)) + print(''.center(100, '-')) if 'processors' in test_dict: numofproc = test_dict['processors'] @@ -81,34 +86,56 @@ def run_examples(config_file): pr.enable() if test_dict['function'] == 'convert': - convert.convert(test_dict['config_file']) + try: + convert.convert(test_dict['config_file']) + test_outcomes[test] = 'Passed' + except Exception as e: + print('Error in conversion example: {0}'.format(test)) + test_outcomes[test] = 'Failed: {0}'.format(e) elif test_dict['function'] == 'convolution': - convolution.convolution(test_dict['config_file']) + try: + convolution.convolution(test_dict['config_file']) + test_outcomes[test] = 'Passed' + except Exception as e: + print('Error in convolution example: {0}'.format(test)) + test_outcomes[test] = 'Failed: {0}'.format(e) elif test_dict['function'] == 'parameters': - parameters.parameters(test_dict['config_file'], - numofproc=numofproc) + try: + parameters.parameters(test_dict['config_file'], + numofproc=numofproc) + test_outcomes[test] = 'Passed' + except Exception as e: + print('Error in parameters example: {0}'.format(test)) + test_outcomes[test] = 'Failed: {0}'.format(e) else: - raise ValueError('Unknow function variable: ' + raise ValueError('Unknown function variable: ' '{0}'.format(test_dict['function'])) pr.disable() - s = StringIO.StringIO() + if py3: + s = io.StringIO() + else: + s = io.BytesIO() sortby = 'cumulative' ps = pstats.Stats(pr, stream=s).sort_stats(sortby) ps.print_stats() - print("".center(100, '-')) - print("Done With Test #{0} of {1}: {2}".format(i+1, num_tests, + print(''.center(100, '-')) + print('Done With Test #{0} of {1}: {2}'.format(i + 1, num_tests, test).center(100)) - print(".....Printing Profile Information.....".center(100)) - print("".center(100, '-')) + print('.....Printing Profile Information.....'.center(100)) + print(''.center(100, '-')) print(s.getvalue()) - print("".center(100, '-')) + print(''.center(100, '-')) - return + print('Done with examples...printing summary') + for test, outcome in iteritems(test_outcomes): + print('\t{0}: {1}'.format(test, outcome)) + + return 0 # -------------------------------------------------------------------- # -if __name__ == "__main__": +if __name__ == '__main__': main() # 
-------------------------------------------------------------------- # diff --git a/tests/unit/rvic_unit_test.py b/tests/unit/rvic_unit_test.py deleted file mode 100644 index 624f316..0000000 --- a/tests/unit/rvic_unit_test.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/local/env python -""" -rvic_unit_test.py - -Set to run with pytest - -Usage: py.test (from RVIC or test directory) -""" -import numpy as np -import sys -sys.path.append("../") - -# -------------------------------------------------------------------- # -# Unit tests for utilities.py -from rvic.core.utilities import * -from rvic.core.config import * - - -def test_config_type_int(): - assert config_type('1') == 1 - - -def test_config_type_float(): - assert config_type('1.75') == 1.75 - - -def test_config_type_bool(): - assert config_type('True') - - -def test_find_nearest(): - assert find_nearest(np.array([8, 19, 39, 100, 399]), 20) == 1 - - -def test_find_nearest_max(): - assert find_nearest(np.array([8, 19, 39, 100, 399]), 500) == 4 - - -def test_find_nearest_min(): - assert find_nearest(np.array([8, 19, 39, 100, 399]), 1) == 0 -# -------------------------------------------------------------------- # - -# -------------------------------------------------------------------- # -# Unit tests for make_uh.py -from rvic.core.make_uh import * - - -def test_find_ts(): - assert find_ts(np.array([0, 86400, 172800])) == 86400 -# -------------------------------------------------------------------- # - -# -------------------------------------------------------------------- # -# Unit tests for make_uh.py -from rvic.core.time_utility import * -from rvic.core.share import TIMEUNITS -from netCDF4 import date2num -from datetime import datetime - - -def test_ord_to_datetime(): - # Independence day - date = datetime(1776, 7, 4, 12, 0, 0, 0) - ord_time = date2num(date, TIMEUNITS) - # Independence day (note that this fails if date has microseconds != 0) - assert ord_to_datetime(ord_time, TIMEUNITS) == date -# -------------------------------------------------------------------- # diff --git a/tests/unit/test_aggregate.py b/tests/unit/test_aggregate.py new file mode 100644 index 0000000..eadef67 --- /dev/null +++ b/tests/unit/test_aggregate.py @@ -0,0 +1,84 @@ +import pytest +import numpy as np +import pandas as pd +from rvic.core.aggregate import make_agg_pairs, aggregate + + +config_dict = {} +config_dict['DOMAIN'] = {'LONGITUDE_VAR': 'LONGITUDE', + 'LATITUDE_VAR': 'LATITUDE'} +config_dict['ROUTING'] = {'LONGITUDE_VAR': 'LONGITUDE', + 'LATITUDE_VAR': 'LATITUDE', + 'SOURCE_AREA_VAR': 'SOURCE_AREA'} + + +@pytest.fixture() +def dom_data(scope='function'): + dom = {} + lons, lats = np.meshgrid(np.linspace(-10, 0., 20), np.linspace(40, 50, 20)) + dom[config_dict['DOMAIN']['LONGITUDE_VAR']] = lons + dom[config_dict['DOMAIN']['LATITUDE_VAR']] = lats + dom['cell_ids'] = np.arange(lons.size).reshape(lons.shape) + return dom + + +@pytest.fixture() +def fdr_data(scope='function'): + fdr = {} + fdr['resolution'] = 1.0 + lons, lats = np.meshgrid(np.arange(-15, 15, fdr['resolution']), + np.arange(20, 60, fdr['resolution'])) + fdr[config_dict['ROUTING']['LONGITUDE_VAR']] = lons + fdr[config_dict['ROUTING']['LATITUDE_VAR']] = lats + fdr[config_dict['ROUTING']['SOURCE_AREA_VAR']] = np.ones_like(lons) + return fdr + + +@pytest.fixture() +def in_data(fdr_data, scope='function'): + rout_data = {} + rout_data['lat'] = fdr_data[config_dict['ROUTING']['LATITUDE_VAR']] + rout_data['lon'] = fdr_data[config_dict['ROUTING']['LONGITUDE_VAR']] + shape = 
fdr_data[config_dict['ROUTING']['LATITUDE_VAR']].shape + rout_data['unit_hydrograph'] = np.random.random((10, ) + shape) + rout_data['fraction'] = np.ones(shape) + rout_data['unit_hydrograph_dt'] = 3600. + return rout_data + + +@pytest.fixture() +def agg_data(fdr_data, scope='function'): + rout_data = {} + rout_data['lat'] = fdr_data[config_dict['ROUTING']['LATITUDE_VAR']][:4] + rout_data['lon'] = fdr_data[config_dict['ROUTING']['LONGITUDE_VAR']][:4] + shape = rout_data['lat'].shape + rout_data['unit_hydrograph'] = np.random.random((10, ) + shape) + rout_data['fraction'] = np.ones(shape) + rout_data['unit_hydrograph_dt'] = 3600. + return rout_data + + +def test_make_agg_pairs_all_unique(dom_data, fdr_data): + pour_points = pd.DataFrame({'lons': [-2.3, 0.3, 10.4, 17.8], + 'lats': [42.1, 40.5, 49.0, 45.2]}) + outlets = make_agg_pairs(pour_points, dom_data, fdr_data, config_dict) + assert len(outlets) == len(pour_points) + + +def test_make_agg_pairs_with_overlap(dom_data, fdr_data): + pour_points = pd.DataFrame({'lons': [-2.3, 0.3, 10.4, 17.8, -2.31], + 'lats': [42.1, 40.5, 49.0, 45.2, 42.11]}) + outlets = make_agg_pairs(pour_points, dom_data, fdr_data, config_dict) + assert len(outlets) == len(pour_points) - 1 + + +def test_aggregate_no_agg_data(in_data, fdr_data): + agg_data = aggregate(in_data, {}, res=fdr_data['resolution'], + pad=0, maskandnorm=False) + assert all([k in agg_data for k in in_data]) + + +def test_aggregate_with_aggdata(in_data, agg_data, fdr_data): + agg_data = aggregate(in_data, agg_data, res=fdr_data['resolution'], + pad=0, maskandnorm=False) + assert all([k in agg_data for k in in_data]) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 0000000..7da0bf8 --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,25 @@ +from rvic.core.config import config_type, isfloat, isint + + +def test_config_type_int(): + assert config_type('1') == 1 + + +def test_config_type_float(): + assert config_type('1.75') == 1.75 + + +def test_config_type_bool(): + assert config_type('True') + + +def test_isfloat(): + assert isfloat(4.3) + assert isfloat('4.3') + + +def test_isint(): + assert isint(4) + assert isint('4') + assert not isint(4.3) + assert not isint('4.3') diff --git a/tests/unit/test_convolution_wrapper.py b/tests/unit/test_convolution_wrapper.py new file mode 100644 index 0000000..307f678 --- /dev/null +++ b/tests/unit/test_convolution_wrapper.py @@ -0,0 +1,109 @@ +import pytest +import numpy as np +from rvic.core.convolution_wrapper import rvic_convolve + + +def test_convolution_wraper(): + n_sources = 4 + n_outlets = 2 + time_length = 6 + ysize = 10 + xsize = 10 + source2outlet_ind = np.array([0, 0, 1, 1], dtype=np.int32) + source_y_ind = np.array([2, 4, 6, 8], dtype=np.int32) + source_x_ind = np.array([2, 6, 3, 5], dtype=np.int32) + source_time_offset = np.zeros(n_sources, dtype=np.int32) + unit_hydrograph = np.array([[0., 1., 0.5, 0.25], + [1., 0., 0.5, 0.25], + [0., 0., 0., 0.25], + [0., 0., 0., 0.25], + [0., 0., 0., 0.], + [0., 0., 0., 0.]]) + aggrunin = np.ones((ysize, xsize), dtype=np.float64) + ring = np.zeros((time_length, n_outlets)) + + rvic_convolve(n_sources, + n_outlets, + time_length, + xsize, + source2outlet_ind, + source_y_ind, + source_x_ind, + source_time_offset, + unit_hydrograph, + aggrunin, + ring) + assert ring.sum() == 4 + np.testing.assert_almost_equal(ring.sum(axis=1), + unit_hydrograph.sum(axis=1)) + np.testing.assert_almost_equal(ring.sum(axis=0), + [2, 2]) + + +def test_convolution_wraper_1_outlet(): + 
n_sources = 4 + n_outlets = 1 + time_length = 6 + ysize = 10 + xsize = 10 + source2outlet_ind = np.array([0, 0, 0, 0], dtype=np.int32) + source_y_ind = np.array([2, 4, 6, 8], dtype=np.int32) + source_x_ind = np.array([2, 6, 3, 5], dtype=np.int32) + source_time_offset = np.zeros(n_sources, dtype=np.int32) + unit_hydrograph = np.array([[0., 1., 0.5, 0.25], + [1., 0., 0.5, 0.25], + [0., 0., 0., 0.25], + [0., 0., 0., 0.25], + [0., 0., 0., 0.], + [0., 0., 0., 0.]]) + aggrunin = np.ones((ysize, xsize), dtype=np.float64) + ring = np.zeros((time_length, n_outlets)) + + rvic_convolve(n_sources, + n_outlets, + time_length, + xsize, + source2outlet_ind, + source_y_ind, + source_x_ind, + source_time_offset, + unit_hydrograph, + aggrunin, + ring) + assert ring.sum() == 4 + np.testing.assert_almost_equal(ring.sum(axis=1), + unit_hydrograph.sum(axis=1)) + np.testing.assert_almost_equal(ring.sum(axis=0), + [4]) + + +def test_convolution_wraper_1_source_1_outlet(): + n_sources = 1 + n_outlets = 1 + time_length = 6 + ysize = 10 + xsize = 10 + source2outlet_ind = np.array([0, 0, 1, 1], dtype=np.int32) + source_y_ind = np.array([2], dtype=np.int32) + source_x_ind = np.array([5], dtype=np.int32) + source_time_offset = np.zeros(n_sources, dtype=np.int32) + unit_hydrograph = np.array([[0., 0.25, 0.5, 0.25, 0., 0.]]).T + aggrunin = np.ones((ysize, xsize), dtype=np.float64) + ring = np.zeros((time_length, n_outlets)) + + rvic_convolve(n_sources, + n_outlets, + time_length, + xsize, + source2outlet_ind, + source_y_ind, + source_x_ind, + source_time_offset, + unit_hydrograph, + aggrunin, + ring) + assert ring.sum() == 1 + np.testing.assert_almost_equal(ring.sum(axis=1), + unit_hydrograph.sum(axis=1)) + np.testing.assert_almost_equal(ring.sum(axis=0), + [1]) diff --git a/tests/unit/test_history.py b/tests/unit/test_history.py new file mode 100644 index 0000000..9768ff6 --- /dev/null +++ b/tests/unit/test_history.py @@ -0,0 +1,19 @@ +import pytest +import os +import numpy as np +from rvic.core.variables import Rvar +from rvic.core.history import Tape + + +@pytest.fixture() +def rvar(scope='function'): + dirname = os.path.dirname(__file__) + infile = os.path.join(dirname, 'unit_test_data', + 'gunnison_parameters_01.rvic.prm.BLMSA.20150226.nc') + rv = Rvar(infile, 'test_case', 'noleap', dirname, 'NETCDF4') + return rv + + +def test_create_tape_instance(rvar): + history_tape = Tape(1.25, 'test', rvar, grid_area=np.zeros((10, 11)), + outtype='array') diff --git a/tests/unit/test_log.py b/tests/unit/test_log.py new file mode 100644 index 0000000..83c0e8e --- /dev/null +++ b/tests/unit/test_log.py @@ -0,0 +1,18 @@ +import pytest +from rvic.core.log import StreamToFile + + +def test_setup_stream(): + stream = StreamToFile() + stream.write('buf') + stream.flush() + + +def test_stream_raises_with_bad_log_level(): + with pytest.raises(TypeError): + stream = StreamToFile(log_level='junk') + stream.write('buf') + stream.flush() + + +# Cannot test logger in interactive session or using pytest diff --git a/tests/unit/test_make_uh.py b/tests/unit/test_make_uh.py new file mode 100644 index 0000000..aee9173 --- /dev/null +++ b/tests/unit/test_make_uh.py @@ -0,0 +1,183 @@ +import pytest +import numpy as np +from rvic.core.make_uh import (find_ts, read_direction, search_catchment, + make_uh, make_grid_uh_river, make_grid_uh, + adjust_uh_timestep) +from rvic.core.variables import Point + + +@pytest.fixture() +def fdr_vic(scope='function'): + a = [[0, 0, 0, 5, 5, 5, 6, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 4, 0, 4, 0, 
5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 5, 3, 5, 5, 6, 7, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 5, 4, 0, 5, 5, 5, 5, 6, 7, 5, 5, 5, 7, 5, 0, 0, 0, 0, 0, 0], + [0, 0, 5, 4, 0, 5, 4, 5, 5, 7, 7, 5, 6, 7, 5, 6, 0, 0, 0, 0, 0, 0], + [4, 0, 5, 7, 4, 0, 3, 5, 5, 7, 5, 5, 7, 4, 5, 6, 0, 0, 0, 0, 0, 0], + [4, 0, 3, 4, 0, 4, 0, 4, 3, 5, 6, 7, 7, 4, 5, 6, 0, 0, 0, 0, 0, 0], + [0, 4, 0, 4, 0, 3, 1, 2, 4, 0, 4, 5, 7, 5, 5, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 3, 4, 3, 1, 8, 4, 5, 4, 0, 5, 5, 3, 5, 6, 5, 0, 0, 0, 0, 0], + [0, 0, 0, 3, 4, 5, 5, 4, 5, 4, 3, 5, 6, 7, 7, 5, 5, 7, 0, 0, 0, 0], + [0, 0, 0, 4, 3, 5, 5, 3, 4, 0, 4, 3, 4, 3, 5, 5, 7, 7, 0, 0, 0, 0], + [0, 0, 0, 4, 0, 5, 4, 0, 4, 3, 4, 0, 4, 5, 3, 5, 7, 7, 0, 0, 0, 0], + [0, 0, 0, 0, 4, 0, 4, 0, 4, 0, 4, 5, 3, 6, 7, 7, 5, 5, 6, 0, 5, 7], + [0, 0, 0, 0, 0, 4, 0, 3, 4, 3, 5, 3, 6, 7, 6, 5, 7, 7, 7, 7, 7, 7], + [0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 5, 7, 5, 6, 7, 1, 7, 1, 7, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5, 5, 6, 7, 7, 7, 6, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 5, 7, 1, 7, 7, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 2, 4, 5, 7, 7, 1, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]] + return np.array(a, dtype=np.int) + + +@pytest.fixture() +def fdr_vic_small(scope='function'): + a = [[0, 0, 0, 5, 5], + [0, 0, 0, 4, 0], + [0, 0, 5, 3, 5], + [0, 0, 5, 4, 0], + [0, 0, 5, 4, 0]] + return np.array(a, dtype=np.int) + + +@pytest.fixture() +def dy_vic(scope='module'): + dy = {1: -1, 2: -1, 3: 0, 4: 1, 5: 1, 6: 1, 7: 0, 8: -1} + return dy + + +@pytest.fixture() +def dx_vic(scope='module'): + dx = {1: 0, 2: 1, 3: 1, 4: 1, 5: 0, 6: -1, 7: -1, 8: - 1} + return dx + + +@pytest.fixture() +def pour_point(scope='module'): + p = Point() + p.basinx = 4 + p.basiny = 2 + return p + + +def test_find_ts(): + assert find_ts(np.array([0, 86400, 172800])) == 86400 + + +def test_find_ts_raises_when_scalar(): + with pytest.raises(TypeError): + find_ts(4) + + +def test_read_direction(fdr_vic, dy_vic, dx_vic): + to_y, to_x = read_direction(fdr_vic, dy_vic, dx_vic) + np.testing.assert_equal(to_y.shape, to_x.shape) + assert to_y.max() <= fdr_vic.shape[0] + 1 + assert to_x.max() <= fdr_vic.shape[1] + 1 + + +def test_search_catchment(fdr_vic_small, dy_vic, dx_vic, pour_point): + basin_ids = np.ones_like(fdr_vic_small, dtype=np.int) + basin_id = 1 + to_y, to_x = read_direction(fdr_vic_small, dy_vic, dx_vic) + catchment, catch_fracs = search_catchment(to_y, to_x, pour_point, + basin_ids, basin_id) + assert catch_fracs.min() <= 0. + assert catch_fracs.max() == 1. + assert type(catchment) == dict + assert all([k in catchment for k in ['count_ds', 'x_inds', 'y_inds']]) + assert len(catchment['count_ds']) > 0 + assert len(catchment['count_ds']) == len(catchment['x_inds']) + assert len(catchment['count_ds']) == len(catchment['y_inds']) + + +def test_make_uh(fdr_vic_small): + ndays = 4 + y_inds, x_inds = np.nonzero(fdr_vic_small) + velocity = np.zeros(fdr_vic_small.shape, dtype=np.float) + 2. + diffusion = np.zeros(fdr_vic_small.shape, dtype=np.float) + 3000 + xmask = np.ones(fdr_vic_small.shape, dtype=np.float) + uh = make_uh(86400, ndays, y_inds, x_inds, velocity, diffusion, xmask) + assert uh.shape[0] == ndays + assert uh.shape[1:] == fdr_vic_small.shape + assert uh.min() >= 0. + assert uh.max() <= 1. 
+ np.testing.assert_almost_equal(uh.sum(axis=0)[y_inds, x_inds], 1) + + +def test_make_grid_uh_river(fdr_vic_small, dy_vic, dx_vic, pour_point): + ndays = 4 + t_uh = 40 + basin_ids = np.ones_like(fdr_vic_small, dtype=np.int) + basin_id = 1 + to_y, to_x = read_direction(fdr_vic_small, dy_vic, dx_vic) + uh = np.zeros((ndays, ) + fdr_vic_small.shape) + uh[0, :, :] = 1. + catchment, _ = search_catchment(to_y, to_x, pour_point, + basin_ids, basin_id) + uh_river = make_grid_uh_river(t_uh, ndays, uh, to_y, to_x, pour_point, + catchment['y_inds'], catchment['x_inds'], + catchment['count_ds']) + assert uh_river.shape[0] == t_uh + assert uh_river.max() <= 1. + assert uh_river.min() >= 0. + np.testing.assert_almost_equal(uh_river.sum(axis=0)[catchment['y_inds'], + catchment['x_inds']], + 1) + + +def test_make_grid_uh(fdr_vic_small, dy_vic, dx_vic, pour_point): + ndays = 4 + t_uh = 40 + basin_ids = np.ones_like(fdr_vic_small, dtype=np.int) + basin_id = 1 + to_y, to_x = read_direction(fdr_vic_small, dy_vic, dx_vic) + catchment, _ = search_catchment(to_y, to_x, pour_point, + basin_ids, basin_id) + uh_river = np.zeros((t_uh, ) + fdr_vic_small.shape) + uh_river[0] = 1. + uh_box = np.array([1., 0, 0, 0]) + unit_hydrograph = make_grid_uh(t_uh, ndays, uh_river, uh_box, to_y, to_x, + catchment['y_inds'], catchment['x_inds'], + catchment['count_ds']) + assert unit_hydrograph.shape[0] == t_uh + assert unit_hydrograph.max() <= 1. + assert unit_hydrograph.min() >= 0. + np.testing.assert_almost_equal( + unit_hydrograph.sum(axis=0)[catchment['y_inds'], catchment['x_inds']], + 1) + + +def test_adjust_uh_timestep_nochange(fdr_vic_small): + t_uh = 40 + y_inds, x_inds = np.nonzero(fdr_vic_small) + unit_hydrograph = np.zeros((t_uh, ) + fdr_vic_small.shape, dtype=np.float) + unit_hydrograph[0] = 1. + + uh = adjust_uh_timestep(unit_hydrograph, t_uh, 3600, 3600, x_inds, y_inds) + assert uh.shape[0] == t_uh + np.testing.assert_almost_equal( + uh.sum(axis=0)[y_inds, x_inds], 1) + + +def test_adjust_uh_timestep_downscale(fdr_vic_small): + t_uh = 40 + y_inds, x_inds = np.nonzero(fdr_vic_small) + unit_hydrograph = np.zeros((t_uh, ) + fdr_vic_small.shape, dtype=np.float) + unit_hydrograph[0] = 1. + + uh = adjust_uh_timestep(unit_hydrograph, t_uh, 3600, 1800, x_inds, y_inds) + np.testing.assert_almost_equal( + uh.sum(axis=0)[y_inds, x_inds], 1) + + +def test_adjust_uh_timestep_upscale(fdr_vic_small): + t_uh = 40 + y_inds, x_inds = np.nonzero(fdr_vic_small) + unit_hydrograph = np.zeros((t_uh, ) + fdr_vic_small.shape, dtype=np.float) + unit_hydrograph[0] = 1. 
+ + uh = adjust_uh_timestep(unit_hydrograph, t_uh, 1800, 3600, x_inds, y_inds) + np.testing.assert_almost_equal( + uh.sum(axis=0)[y_inds, x_inds], 1) diff --git a/tests/unit/test_param_file.py b/tests/unit/test_param_file.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_plots.py b/tests/unit/test_plots.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_pycompat.py b/tests/unit/test_pycompat.py new file mode 100644 index 0000000..d2d3b7b --- /dev/null +++ b/tests/unit/test_pycompat.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +import pytest +from rvic.core.pycompat import (pyrange, pyzip, iteritems, itervalues, + OrderedDict, SafeConfigParser) + + +def test_pyrange(): + x = pyrange(10) + assert hasattr(x, '__iter__') + assert len(x) == 10 + assert x[9] == 9 + + +def test_pyzip(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + x = pyzip(a, b) + assert hasattr(x, '__iter__') + for i, (aa, bb) in enumerate(x): + assert aa == a[i] + assert bb == b[i] + with pytest.raises(TypeError): + len(x) + x[1] + + +def test_iteritems(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + d = dict(pyzip(a, b)) + for k, v in iteritems(d): + assert k in a + assert v in b + assert d[k] == v + + +def test_iteritems_ordered_dict(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + d = OrderedDict(pyzip(a, b)) + for i, (k, v) in enumerate(iteritems(d)): + assert k in a + assert v in b + assert d[k] == v + assert b[i] == v + + +def test_itervalues(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + d = dict(pyzip(a, b)) + for v in itervalues(d): + assert v in b + + +def test_itervalues_ordered_dict(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + d = OrderedDict(pyzip(a, b)) + for i, v in enumerate(itervalues(d)): + assert v in b + assert b[i] == v + + +def test_ordered_dict_order(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + d = OrderedDict(pyzip(a, b)) + assert list(d.keys()) == a + assert list(d.values()) == b + + +def test_ordered_dict_append(): + a = [1, 2, 3] + b = ['a', 'b', 'c'] + d = OrderedDict(pyzip(a, b)) + d[4] = 'd' + assert len(d) == 4 + assert list(d.keys())[-1] == 4 + + +def test_init_safe_config_parser(): + SafeConfigParser() diff --git a/tests/unit/test_read_forcing.py b/tests/unit/test_read_forcing.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_remap.py b/tests/unit/test_remap.py new file mode 100644 index 0000000..455f380 --- /dev/null +++ b/tests/unit/test_remap.py @@ -0,0 +1,13 @@ +import pytest +try: + from rvic.core.remap import remap + from cdo import CDOException + cdo_unavailable = False +except ImportError: + cdo_unavailable = True + + +@pytest.mark.skipif(cdo_unavailable, reason='cdo not installed') +def test_cdo_raises_exception(): + with pytest.raises(CDOException): + remap('grid_file', 'in_file', 'out_file') diff --git a/tests/unit/test_share.py b/tests/unit/test_share.py new file mode 100644 index 0000000..67382e9 --- /dev/null +++ b/tests/unit/test_share.py @@ -0,0 +1,6 @@ +from rvic.core.share import NcGlobals + + +def test_nc_globals(): + ncg = NcGlobals() + ncg.update() diff --git a/tests/unit/test_time_utility.py b/tests/unit/test_time_utility.py new file mode 100644 index 0000000..8758a64 --- /dev/null +++ b/tests/unit/test_time_utility.py @@ -0,0 +1,28 @@ +# -------------------------------------------------------------------- # +# Unit tests for make_uh.py +from rvic.core.time_utility import ord_to_datetime, Dtime +from rvic.core.share import TIMEUNITS +from netCDF4 import date2num +from datetime import datetime + + +def 
test_ord_to_datetime(): + # Independence day + date = datetime(1776, 7, 4, 12, 0, 0, 0) + ord_time = date2num(date, TIMEUNITS) + # Independence day (note that this fails if date has microseconds != 0) + assert ord_to_datetime(ord_time, TIMEUNITS) == date + + +def test_dtime(): + dt = Dtime('2014-12-01-00', 'ndays', 5, None, 'ndays', 5, None, 'noleap', + 3600.00001) + assert dt.timestamp.year == 2014 + assert dt.timestamp.month == 12 + assert dt.timestamp.day == 1 + assert dt.timestamp.hour == 0 + dt.advance_timestep() + assert dt.timestamp.year == 2014 + assert dt.timestamp.month == 12 + assert dt.timestamp.day == 1 + assert dt.timestamp.hour == 1 diff --git a/tests/unit/test_utilities.py b/tests/unit/test_utilities.py new file mode 100644 index 0000000..17921bd --- /dev/null +++ b/tests/unit/test_utilities.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +import numpy as np +import os +import datetime +from rvic.core.utilities import (latlon2yx, search_for_channel, find_nearest, + write_rpointer, strip_non_ascii, + strip_invalid_char) +from rvic.core.share import RPOINTER + + +def test_latlon2yx(): + glats, glons = np.meshgrid(np.arange(0, 20, 2), np.arange(10, 30, 2)) + plats = np.array([0, 2, 17]) + plons = np.array([28, 17, 12]) + y, x = latlon2yx(plats, plons, glats, glons) + assert len(y) == len(x) == len(plats) + assert y[0] == 9 + assert x[0] == 0 + + +def test_search_for_channel(): + source_area = np.array([[0.00, 0.00, 0.00, 1.00, 0.00, 0.00, 0.00, 0.00], + [0.00, 0.00, 0.00, 2.00, 0.00, 0.00, 0.00, 0.00], + [0.00, 0.00, 0.00, 3.00, 0.00, 0.00, 8.00, 9.00], + [51.0, 52.0, 53.0, 0.00, 4.00, 0.00, 7.00, 10.0], + [50.0, 1.00, 54.0, 0.00, 0.00, 5.00, 6.00, 11.0], + [1.00, 77.0, 1.00, 0.00, 0.00, 0.00, 12.0, 0.00], + [78.0, 20.0, 19.0, 0.00, 0.00, 13.0, 0.00, 0.00], + [79.0, 21.0, 18.0, 0.00, 0.00, 14.0, 0.00, 0.00], + [80.0, 22.0, 0.00, 17.0, 16.0, 15.0, 0.00, 0.00]]) + + routys = np.array([4, 8, 0, 1]) + routxs = np.array([1, 0, 3, 7]) + + new_ys, new_xs = search_for_channel(source_area, routys, routxs, search=1) + + print(new_ys, new_xs) + + np.testing.assert_equal(new_ys, [5, 8, 0, 2]) + np.testing.assert_equal(new_xs, [1, 0, 3, 7]) + np.testing.assert_equal(source_area[new_ys, new_xs], [77., 80., 1., 9.]) + + +def test_write_rpointer(): + write_rpointer('./', 'test_restart_file.nc', datetime.datetime.now()) + testfile = os.path.join('./', RPOINTER) + assert os.path.isfile(testfile) + os.remove(testfile) + + +def test_find_nearest(): + assert find_nearest(np.array([8, 19, 39, 100, 399]), 20) == 1 + + +def test_find_nearest_max(): + assert find_nearest(np.array([8, 19, 39, 100, 399]), 500) == 4 + + +def test_find_nearest_min(): + assert find_nearest(np.array([8, 19, 39, 100, 399]), 1) == 0 + + +def test_strip_non_ascii(): + test = u'éáé123456tgreáé@€' + new = strip_non_ascii(test) + assert new == '123456tgre@' + test = 'standard' + new = strip_non_ascii(test) + assert new == test + + +def test_strip_invalid_char(): + test = '123$%^789' + new = strip_invalid_char(test) + assert new == '123789' diff --git a/tests/unit/test_variables.py b/tests/unit/test_variables.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_write.py b/tests/unit/test_write.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/unit_test_data/gunnison_parameters_01.rvic.prm.BLMSA.20150226.nc b/tests/unit/unit_test_data/gunnison_parameters_01.rvic.prm.BLMSA.20150226.nc new file mode 100644 index 0000000..cb26482 Binary files /dev/null and 
diff --git a/tests/unit/test_variables.py b/tests/unit/test_variables.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/unit/test_write.py b/tests/unit/test_write.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/unit/unit_test_data/gunnison_parameters_01.rvic.prm.BLMSA.20150226.nc b/tests/unit/unit_test_data/gunnison_parameters_01.rvic.prm.BLMSA.20150226.nc
new file mode 100644
index 0000000..cb26482
Binary files /dev/null and b/tests/unit/unit_test_data/gunnison_parameters_01.rvic.prm.BLMSA.20150226.nc differ
diff --git a/tools/find_pour_points.py b/tools/find_pour_points.py
index d93fac8..66b50d9 100755
--- a/tools/find_pour_points.py
+++ b/tools/find_pour_points.py
@@ -2,7 +2,7 @@
 """
 Find the outlet location of basins in a grid
 """
-from future import print_function
+from __future__ import print_function
 import numpy as np
 from netCDF4 import Dataset
 import time as tm
@@ -17,10 +17,10 @@ def main():
     input_file, output_file, which_points, verbose = process_command_line()
 
     # ---------------------------------------------------------------- #
-    #Load input Rasters (Basin Mask and Accumulated Upstream Area)
-    #Input rasters need to be the same size
+    # Load input Rasters (Basin Mask and Accumulated Upstream Area)
+    # Input rasters need to be the same size
    if verbose:
-        print 'Reading input file: %s' % input_file
+        print('Reading input file: %s' % input_file)
 
     f = Dataset(input_file, 'r')
     basin_id = f.variables['Basin_ID'][:]
@@ -38,10 +38,9 @@ def main():
 
     if which_points == 'all':
-        print('Returning all land cells as pour pour points')
+        print('Returning all land cells as pour points')
         basin, x_outlet, y_outlet, max_area, min_x, min_y, max_x, \
-            max_y = find_all(basin_id, source_area, land_mask, lons, lats, res,
-                             verbose)
+            max_y = find_all(basin_id, source_area, land_mask, lons, lats, res)
     else:
-        print 'Returning all basin outlet grid cells as pour points'
+        print('Returning all basin outlet grid cells as pour points')
         basin, x_outlet, y_outlet, max_area, min_x, min_y, max_x, \
             max_y = find_outlets(basin_id, source_area, lons, lats, res,
                                  verbose)
@@ -50,20 +49,20 @@ def main():
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
     # Write output file
     if verbose:
-        print 'writing to outfile:', output_file
+        print('writing to outfile:', output_file)
 
     if os.path.splitext(output_file)[1] != '.nc':
         write_ascii_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
                          max_x, max_y, output_file)
     else:
         write_netcdf_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
                           max_x, max_y, output_file)
     # ---------------------------------------------------------------- #


 # -------------------------------------------------------------------- #
 # -------------------------------------------------------------------- #
-def find_all(basin_id, source_area, land_mask, lons, lats, res, verbose):
+def find_all(basin_id, source_area, land_mask, lons, lats, res):
     """Return the info for all land points """
     lat, lon = np.meshgrid(lats, lons)
@@ -84,19 +83,19 @@ def find_all(basin_id, source_area, land_mask, lons, lats, res):
     # ---------------------------------------------------------------- #

     # ---------------------------------------------------------------- #
-    #Loop over every basin id, finding the maximum upstream area [and location]
-    #and record the basin#,longitude,latitude,area
+    # Loop over every basin id, find the maximum upstream area and its
+    # location, and record the basin id, longitude, latitude, and area
 
-    for i, (yi, xi, bi) in enumerate(zip(y, x, basin)):
+    for i, bi in enumerate(basin):
         inds = np.nonzero(basin_id == bi)
         x_basin = lon[inds]
         y_basin = lat[inds]
-        x_outlet[i] = lon[y, x]
-        y_outlet[i] = lat[y, x]
+        x_outlet[i] = lon[y[i], x[i]]
+        y_outlet[i] = lat[y[i], x[i]]
         min_x[i] = min(x_basin)
-        max_x[i] = max(x_basin)+res
+        max_x[i] = max(x_basin) + res
         min_y[i] = min(y_basin)
-        max_y[i] = max(y_basin)+res
+        max_y[i] = max(y_basin) + res
     # ---------------------------------------------------------------- #
     return basin, x_outlet, y_outlet, max_area, min_x, min_y, max_x, max_y
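# NOTE: illustration only, not part of the patch. Why the find_all loop above
# must index per point: with index arrays y and x, lon[y, x] returns all of
# the selected longitudes at once, so assigning it to the scalar slot
# x_outlet[i] fails; lon[y[i], x[i]] picks out the single cell for point i.
import numpy as np

lon = np.arange(12.0).reshape(3, 4)
y = np.array([0, 1, 2])
x = np.array([1, 2, 3])

print(lon[y, x])        # [ 1.  6. 11.] -- one value per (y, x) pair
print(lon[y[0], x[0]])  # 1.0 -- a single grid cell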
@@ -107,12 +106,12 @@
 def find_outlets(basin_id, source_area, lons, lats, res, verbose):
     """ Find the outlet location of each basin """
     # ---------------------------------------------------------------- #
-    #Make arrays of same dimensions as input arrays of lat/lon values
+    # Make arrays of same dimensions as input arrays of lat/lon values
     x, y = np.meshgrid(lons, lats)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
-    #Setup basin in/out arrays
-    basin_ids = np.arange(np.min(basin_id), np.max(basin_id))
+    # Setup basin in/out arrays
+    basin_ids = np.arange(np.min(basin_id), np.max(basin_id) + 1)
     num_basins = len(basin_ids)
@@ -127,8 +126,8 @@ def find_outlets(basin_id, source_area, lons, lats, res, verbose):
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
-    #Loop over every basin id, finding the maximum upstream area [and location]
-    #and record the basin#,longitude,latitude,area
+    # Loop over every basin id, find the maximum upstream area and its
+    # location, and record the basin id, longitude, latitude, and area
     if verbose:
         sys.stdout.write('Done reading input file...\n ')
         sys.stdout.write('Searching in %i basins for pour '
@@ -148,9 +147,9 @@ def find_outlets(basin_id, source_area, lons, lats, res, verbose):
         x_outlet[i] = x_basin[max_ind]
         y_outlet[i] = y_basin[max_ind]
         min_x[i] = min(x_basin)
-        max_x[i] = max(x_basin)+res
+        max_x[i] = max(x_basin) + res
         min_y[i] = min(y_basin)
-        max_y[i] = max(y_basin)+res
+        max_y[i] = max(y_basin) + res
     # ---------------------------------------------------------------- #
     return basin, x_outlet, y_outlet, max_area, min_x, min_y, max_x, max_y
 # -------------------------------------------------------------------- #
@@ -158,7 +157,7 @@
 # -------------------------------------------------------------------- #
 # write output netcdf
 def write_netcdf_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
                       max_x, max_y, out_file):
     """
-    save the list of pour points as a comma seperated text file
+    Save the list of pour points to a netCDF file
     """
@@ -168,12 +167,12 @@ def write_netcdf_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
     # ---------------------------------------------------------------- #
     # set dimensions
-    points = f.createDimension('points', None)
+    f.createDimension('points', None)
     # ---------------------------------------------------------------- #
 
     # ---------------------------------------------------------------- #
     # initialize variables
-    OIDs = f.createVariable('OID', 'i8', ('points', ))
+    oids = f.createVariable('OID', 'i8', ('points', ))
     x_outlets = f.createVariable('x_outlet', 'f8', ('points', ))
     y_outlets = f.createVariable('y_outlet', 'f8', ('points', ))
     max_areas = f.createVariable('max_area', 'i8', ('points', ))
@@ -193,8 +192,8 @@ def write_netcdf_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
     # ---------------------------------------------------------------- #
     # write variable attributes
-    OIDs.long_name = 'Basin Identifier'
-    OIDs.standard_name = 'OID'
+    oids.long_name = 'Basin Identifier'
+    oids.standard_name = 'OID'
 
     x_outlets.long_name = 'longitude_coordinate_of_pour_point'
     x_outlets.standard_name = 'longitude'
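# NOTE: illustration only, not part of the patch. A stripped-down version of
# the writer above, showing the netCDF4 pattern it relies on: an unlimited
# 'points' dimension grows to fit whatever is assigned to var[:]. The output
# file name here is hypothetical.
import numpy as np
from netCDF4 import Dataset

f = Dataset('pour_points_example.nc', 'w')
f.createDimension('points', None)  # unlimited; length is set on first write
oids = f.createVariable('OID', 'i8', ('points', ))
oids.long_name = 'Basin Identifier'
oids[:] = np.array([1, 2, 3])      # 'points' now has length 3
f.close()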
@@ -228,7 +227,7 @@ def write_netcdf_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
     # ---------------------------------------------------------------- #
-    # fill vairables
-    OIDs[:] = basin
+    # fill variables
+    oids[:] = basin
     x_outlets[:] = x_outlet
     y_outlets[:] = y_outlet
-    max_areas[:] = max_areas
+    max_areas[:] = max_area
@@ -255,9 +254,10 @@ def write_ascii_file(basin, x_outlet, y_outlet, max_area, min_x, min_y,
     # ---------------------------------------------------------------- #
     # set format
     fmt = ['%i', '%.10f', '%.10f', '%i', '%.10f', '%.10f', '%.10f', '%.10f']
     out = np.column_stack((basin, x_outlet, y_outlet, max_area, min_x, min_y,
                            max_x, max_y))
-    header = 'OID, longitude, latitude, basin_area, min_lon, min_lat, max_lon, max_lat\n'
+    header = 'OID, longitude, latitude, basin_area, min_lon, min_lat, ' \
+             'max_lon, max_lat\n'
-    with file(out_file, 'w') as outfile:
+    with open(out_file, 'w') as outfile:
         outfile.write(header)
         np.savetxt(outfile, out, fmt=fmt, delimiter=',')
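# NOTE: illustration only, not part of the patch. A self-contained check of
# the ASCII format used above, with made-up data; np.savetxt pairs one fmt
# entry with each stacked column, and passing a filename plus the header
# keyword (numpy >= 1.7) sidesteps py2/py3 file-handle mode differences.
# The output file name is hypothetical.
import numpy as np

fmt = ['%i', '%.10f', '%.10f', '%i', '%.10f', '%.10f', '%.10f', '%.10f']
out = np.column_stack(([1], [-120.25], [45.75], [42000],
                       [-121.0], [45.0], [-120.0], [46.0]))
np.savetxt('pour_points_example.txt', out, fmt=fmt, delimiter=',',
           header='OID, longitude, latitude, basin_area, min_lon, min_lat, '
                  'max_lon, max_lat', comments='')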